added worker code

David Alasow 2018-11-20 17:22:09 +01:00
parent be663bf1f1
commit 173cadabea
3 changed files with 54813 additions and 55 deletions


@@ -52,7 +52,7 @@ type MatchedAnime struct {
 	Score int
 }
 
-//check does a default error check
+//Check does a default error check
 func check(e error) {
 	if e != nil {
 		panic(e)
@@ -115,15 +115,17 @@ func SubAnimeDates(Date, Date2 time.Time) (total int) {
 //checkResults takes an Anime struct and a channel.
 //It compares the supplied anime to all the search results returned from searching the title.
 //If the score is over 100 it should be a match.
-func CheckResults(anime Anime, c chan MatchedAnime) {
+func CheckResults(jobs chan Anime, c chan MatchedAnime) {
+	for anime := range jobs {
 	fmt.Println(`Searching anime`, anime.ID)
 	var highest, hI int
 	var err error
 	anime.Title = strings.Replace(strings.Replace(anime.Title, `\`, `\\`, -1), `"`, `\"`, -1)
 	var search = bytes.NewBuffer([]byte(`{"title":"` + anime.Title + `", "show_r18": true}`))
-	resp, err := http.Post("https://api.meikan.moe/v1/anime?incl=start_date,end_date", "application/json", search)
+	resp, err := http.Post("https://api.meikan.moe/v1/anime", "application/json", search)
 	check(err)
 	fmt.Println(`Completed meikan search for`, anime.ID)
+	//fmt.Println(resp)
 	// nolint: errcheck
 	defer resp.Body.Close()
 	var result Result
@@ -164,6 +166,7 @@ func CheckResults(anime Anime, c chan MatchedAnime) {
 		}
 	}
 	//return highest, hI
+	//fmt.Println(len(jobs))
 	if len(result.Anime) != 0 {
 		c <- MatchedAnime{
 			MeikanTitle: result.Anime[hI].Title,
@@ -174,6 +177,7 @@ func CheckResults(anime Anime, c chan MatchedAnime) {
 		}
 	}
 }
+}
 func main() {
 	http.DefaultTransport = &http.Transport{MaxIdleConnsPerHost: 50, MaxIdleConns: 50}

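The diff above turns CheckResults from a one-shot goroutine into a worker that keeps pulling Anime values off a jobs channel, so main can start a fixed pool of workers instead of one goroutine per title. A minimal, self-contained sketch of that worker-pool shape, with placeholder Job/Result types and a stub in place of the real meikan search (unlike the commit, it closes the jobs channel and waits on a WaitGroup rather than timing out):

package main

import (
	"fmt"
	"sync"
)

// Job and Result stand in for compareID.Anime and compareID.MatchedAnime.
type Job struct{ ID int }
type Result struct{ ID int }

// worker drains the jobs channel until it is closed, emitting one Result per Job.
func worker(jobs <-chan Job, results chan<- Result, wg *sync.WaitGroup) {
	defer wg.Done()
	for j := range jobs {
		// The real worker would call the search API here.
		results <- Result{ID: j.ID}
	}
}

func main() {
	jobs := make(chan Job)
	results := make(chan Result)
	var wg sync.WaitGroup

	// Fixed pool of workers, like the 50 CheckResults goroutines started in main.go.
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go worker(jobs, results, &wg)
	}

	// Feed the jobs, close the channel so the workers' range loops end,
	// then close results once every worker has returned.
	go func() {
		for i := 1; i <= 10; i++ {
			jobs <- Job{ID: i}
		}
		close(jobs)
		wg.Wait()
		close(results)
	}()

	for r := range results {
		fmt.Println("matched", r.ID)
	}
}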
thing/main.go (new file, 138 lines)

@@ -0,0 +1,138 @@
package main
import (
"encoding/csv"
"encoding/json"
"fmt"
"git.fuyu.moe/Tracreed/compareID"
"io"
"io/ioutil"
"net/http"
"regexp"
"strconv"
"strings"
"time"
)
func check(e error) {
if e != nil {
panic(e)
}
}
//Result holds the anime fields decoded from an API response.
type Result struct {
EndDate string `json:"end_date"`
Episodes int `json:"total_episodes"`
ID int `json:"id"`
StartDate string `json:"start_date"`
Title string `json:"title_english"`
Type string `json:"type"`
}
//FixAnidbDates parses a slice of one or two AniDB date strings (layout 02.01.2006)
//into start and end times, leaving the zero value for unknown ("?") or empty dates.
func FixAnidbDates(Dates []string) (startDate, endDate time.Time) {
if !strings.ContainsAny(Dates[0], "?") && Dates[0] != "" {
startDate, _ = time.Parse(`02.01.2006`, Dates[0])
}
if len(Dates) == 2 {
if !strings.ContainsAny(Dates[1], "?") && Dates[1] != "" {
endDate, _ = time.Parse(`02.01.2006`, Dates[1])
}
}
return startDate, endDate
}
//ParseCsv reads the AniDB dump CSV and sends one compareID.Anime per record on the jobs channel.
func ParseCsv(jobs chan compareID.Anime, c chan compareID.MatchedAnime, sleep time.Duration) {
dat, err := ioutil.ReadFile("/home/trac/coding/compareID/1.-Main-data.csv")
check(err)
r := csv.NewReader(strings.NewReader(string(dat)))
_, err = r.Read()
check(err)
for {
record, err := r.Read()
if err == io.EOF {
break
}
EpRegex := regexp.MustCompile(`.*, (\d*) .*`)
ep := strings.Split(record[11], ",")
eps := 1
episodes, err := strconv.Atoi(EpRegex.ReplaceAllString(record[11], "$1"))
if err == nil {
eps = episodes
}
id, err := strconv.Atoi(record[1])
check(err)
Date := strings.Split(record[12], " till ")
startDate, endDate := FixAnidbDates(Date)
a := compareID.Anime{
StartDate: startDate,
EndDate: endDate,
ID: id,
Type: strings.TrimSpace(ep[0]),
Episodes: eps,
Title: strings.Replace(record[3], "Anime: ", "", 1),
}
if a.Title == "ERROR" || a.Title == "?" {
continue
}
jobs <- a
//go compareID.CheckResults(a, c)
//time.Sleep(sleep * time.Millisecond)
}
fmt.Println("Finished parsing csv")
}
func main() {
http.DefaultTransport = &http.Transport{MaxIdleConnsPerHost: 50, MaxIdleConns: 50}
c := make(chan compareID.MatchedAnime)
jobs := make(chan compareID.Anime)
var animes compareID.Animes
/*client := &http.Client{}
req, err := http.NewRequest("GET", "https://anilist.co/api/anime/21898/page", nil)
req.Header.Add("x-csrf-token", `jN6gFN0UdzpxSrZUHcT8RW6h4hJpgqKv85la63ON`)
req.Header.Add(`cookie`, `__cfduid=d254ce47b42a2a6eb3428bea8dbd6dfec1508141286; XSRF-TOKEN=jN6gFN0UdzpxSrZUHcT8RW6h4hJpgqKv85la63ON; laravel_session=eyJpdiI6Im5ENmxURHQwdWt4WldJelc5clBUbXc9PSIsInZhbHVlIjoid2NnakM0RGMwRDR3K09pelM3M045SlJSNm5tRnk1Y0cwQThcL01xTkhva0hLd2tEOElwcDBYWjdWWXNPUTVzWnpXVk5uaUJWb3o4WnBvaFpsWWZFQkFnPT0iLCJtYWMiOiJlODlkMmJiNzQ1ZTg3NjM0NDFiZTljNGVhZDQzZGRlMDhhNWMyNWNiMmIwZWYwZjI5MTg3ODk0YjRmYWVmYTA4In0%3D`)
resp, err := client.Do(req)
check(err)
// nolint: errcheck
defer resp.Body.Close()
var result Result
body, err := ioutil.ReadAll(resp.Body)
check(err)
_ = json.NewDecoder(bytes.NewReader(body)).Decode(&result)*/
for i := 1; i <= 50; i++ {
go compareID.CheckResults(jobs, c)
}
go ParseCsv(jobs, c, 7)
//fmt.Println(result)
/*
a := compareID.Anime{
StartDate: result.StartDate,
EndDate: result.EndDate,
ID: result.ID,
Type: result.Type,
Episodes: result.Episodes,
Title: result.Title,
}
go compareID.CheckResults(a, c)*/
MainLoop:
for {
select {
case m := <-c:
animes = append(animes, m)
case <-time.After(2 * time.Second):
break MainLoop
}
}
//fmt.Println(animes)
resJSON, err := json.MarshalIndent(animes, "", "\t")
check(err)
jsonfile := []byte(resJSON)
err = ioutil.WriteFile("./result.json", jsonfile, 0644)
check(err)
}
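ParseCsv above splits the AniDB date field (column 12) on " till " and FixAnidbDates parses each half with the 02.01.2006 layout, skipping unknown "?" or empty values. A standalone sketch of that parsing step, using made-up sample strings rather than real CSV rows:

package main

import (
	"fmt"
	"strings"
	"time"
)

// parseAnidbRange mirrors the ParseCsv + FixAnidbDates flow: split the raw
// field on " till ", then parse each half with the 02.01.2006 layout,
// leaving the zero time.Time for "?" or empty values.
func parseAnidbRange(raw string) (start, end time.Time) {
	dates := strings.Split(raw, " till ")
	if !strings.ContainsAny(dates[0], "?") && dates[0] != "" {
		start, _ = time.Parse("02.01.2006", dates[0])
	}
	if len(dates) == 2 && !strings.ContainsAny(dates[1], "?") && dates[1] != "" {
		end, _ = time.Parse("02.01.2006", dates[1])
	}
	return start, end
}

func main() {
	// Hypothetical sample values; the real data comes from column 12 of the CSV.
	for _, raw := range []string{"03.04.1998 till 27.04.1999", "05.10.2018 till ?", "?"} {
		s, e := parseAnidbRange(raw)
		fmt.Println(raw, "->", s.Format("2006-01-02"), e.Format("2006-01-02"))
	}
}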

thing/result.json (new file, 54616 lines)

File diff suppressed because it is too large