package main

import (
	"encoding/csv"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"regexp"
	"strconv"
	"strings"
	"time"

	"git.fuyu.moe/Tracreed/compareID"
)

// check panics on any non-nil error. Acceptable for a one-shot CLI tool
// where any failure should abort the whole run.
func check(e error) {
	if e != nil {
		panic(e)
	}
}

// epRegex extracts the episode count from an AniDB type column such as
// "TV Series, 12 episodes". Compiled once at package scope rather than
// once per CSV row inside the parse loop.
var epRegex = regexp.MustCompile(`.*, (\d*) .*`)

// Result mirrors the subset of the AniList anime JSON payload this tool
// consumes. Kept for the (currently disabled) single-anime lookup path.
type Result struct {
	EndDate   string `json:"end_date"`
	Episodes  int    `json:"total_episodes"`
	ID        int    `json:"id"`
	StartDate string `json:"start_date"`
	Title     string `json:"title_english"`
	Type      string `json:"type"`
}

// FixAnidbDates parses an AniDB date pair in "02.01.2006" layout.
// Dates[0] is the start date and Dates[1] (when present) the end date.
// Entries containing "?" or empty strings are treated as unknown and
// yield the zero time.Time. A nil/empty slice returns two zero values.
func FixAnidbDates(Dates []string) (startDate, endDate time.Time) {
	if len(Dates) == 0 {
		return startDate, endDate
	}
	if !strings.Contains(Dates[0], "?") && Dates[0] != "" {
		// Parse errors deliberately ignored: an unparsable date is
		// equivalent to "unknown" and stays the zero value.
		startDate, _ = time.Parse(`02.01.2006`, Dates[0])
	}
	if len(Dates) == 2 {
		if !strings.Contains(Dates[1], "?") && Dates[1] != "" {
			endDate, _ = time.Parse(`02.01.2006`, Dates[1])
		}
	}
	return startDate, endDate
}

// ParseCsv reads the AniDB dump CSV and feeds one compareID.Anime per row
// into jobs for the worker goroutines started in main. Rows whose title is
// "ERROR" or "?" are skipped. The header row is consumed before the loop.
//
// c and sleep are currently unused (kept for the disabled per-row matching
// path) but retained so the call site stays stable.
func ParseCsv(jobs chan compareID.Anime, c chan compareID.MatchedAnime, sleep time.Duration) {
	// NOTE(review): hardcoded absolute path — consider a flag or argument.
	dat, err := ioutil.ReadFile("/home/trac/coding/compareID/1.-Main-data.csv")
	check(err)
	r := csv.NewReader(strings.NewReader(string(dat)))
	// Discard the header row.
	_, err = r.Read()
	check(err)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		// Was silently ignored before: a malformed CSV row now aborts
		// instead of propagating a nil record.
		check(err)

		// record[11] looks like "TV Series, 12 episodes"; split off the
		// type and extract the episode count, defaulting to 1 when the
		// count is absent or non-numeric (e.g. movies).
		ep := strings.Split(record[11], ",")
		eps := 1
		if episodes, err := strconv.Atoi(epRegex.ReplaceAllString(record[11], "$1")); err == nil {
			eps = episodes
		}

		id, err := strconv.Atoi(record[1])
		check(err)

		// record[12] is "start till end" in AniDB's dd.mm.yyyy layout.
		Date := strings.Split(record[12], " till ")
		startDate, endDate := FixAnidbDates(Date)

		a := compareID.Anime{
			StartDate: startDate,
			EndDate:   endDate,
			ID:        id,
			Type:      strings.TrimSpace(ep[0]),
			Episodes:  eps,
			Title:     strings.Replace(record[3], "Anime: ", "", 1),
		}
		if a.Title == "ERROR" || a.Title == "?" {
			continue
		}
		jobs <- a
	}
	fmt.Println("Finished parsing csv")
}

func main() {
	// Allow the 50 worker goroutines to reuse keep-alive connections.
	http.DefaultTransport = &http.Transport{MaxIdleConnsPerHost: 50, MaxIdleConns: 50}
	c := make(chan compareID.MatchedAnime)
	jobs := make(chan compareID.Anime)
	var animes compareID.Animes

	// NOTE(review): a commented-out manual AniList request was removed
	// here — it contained a hardcoded CSRF token and session cookie,
	// which must not live in source control. Rotate those credentials.

	// Fixed-size worker pool consuming jobs and emitting matches on c.
	for i := 1; i <= 50; i++ {
		go compareID.CheckResults(jobs, c)
	}
	go ParseCsv(jobs, c, 7)

	// Collect results until the pipeline has been idle for two seconds.
	// NOTE(review): timeout-based termination is racy under load; a
	// WaitGroup + channel close would be deterministic.
MainLoop:
	for {
		select {
		case m := <-c:
			animes = append(animes, m)
		case <-time.After(2 * time.Second):
			break MainLoop
		}
	}

	resJSON, err := json.MarshalIndent(animes, "", "\t")
	check(err)
	// MarshalIndent already returns []byte; no conversion needed.
	err = ioutil.WriteFile("./result.json", resJSON, 0644)
	check(err)
}