From 93d6d36eb697cd9452eb4aab446151a1a33ed245 Mon Sep 17 00:00:00 2001
From: Adam Mathes
Date: Mon, 23 Jan 2017 20:04:03 -0800
Subject: neko v2 initial commit

---
 crawler/crawler.go | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 63 insertions(+)
 create mode 100644 crawler/crawler.go

(limited to 'crawler')

diff --git a/crawler/crawler.go b/crawler/crawler.go
new file mode 100644
index 0000000..e3e4aeb
--- /dev/null
+++ b/crawler/crawler.go
@@ -0,0 +1,63 @@
+package crawler
+
+import (
+    "log"
+    "neko/models/feed"
+    "neko/models/item"
+    "net/http"
+    "time"
+    "github.com/SlyMarbo/rss"
+)
+
+
+func Crawl() {
+
+    ch := make(chan string)
+
+    feeds, err := feed.All()
+    if err != nil {
+        log.Fatal(err)
+    }
+    for _, f := range feeds {
+        log.Printf("crawling %s", f.Url)
+        go CrawlFeed(f, ch)
+    }
+
+    for i := 0; i < len(feeds); i++ {
+        log.Println(<-ch)
+    }
+}
+
+/*
+   TODO: sanitize input on crawl
+*/
+func CrawlFeed(f *feed.Feed, ch chan<- string) {
+    c := &http.Client{
+        // give up after 5 seconds
+        Timeout: 5 * time.Second,
+    }
+
+    feed, err := rss.FetchByClient(f.Url, c)
+    if err != nil {
+        log.Print(err)
+        ch <- "failed to fetch and parse for " + f.Url
+        return
+    }
+
+    f.Title = feed.Title
+    f.Update()
+
+    for _, i := range feed.Items {
+        log.Printf("storing item: %s", i.Title)
+        var item item.Item
+        item.Title = i.Title
+        item.Url = i.Link
+        item.Description = i.Content
+        if item.Description == "" {
+            item.Description = i.Summary
+        }
+        item.FeedId = f.Id
+        item.Create()
+    }
+    ch <- "successfully crawled " + f.Url
+}
--
cgit v1.2.3
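
The heart of Crawl is a fan-out/fan-in over an unbuffered channel: one
goroutine per feed, each sending exactly one status string, and a final loop
that receives len(feeds) times so Crawl returns only after every fetch has
reported. Below is a minimal, self-contained sketch of that same pattern;
urls and fakeFetch are illustrative stand-ins for feed.All and the
rss.FetchByClient call, not part of neko:

    package main

    import (
        "fmt"
        "time"
    )

    // fakeFetch stands in for CrawlFeed's network work: it sends exactly one
    // status string on ch, the invariant the collector loop relies on.
    func fakeFetch(url string, ch chan<- string) {
        time.Sleep(100 * time.Millisecond) // pretend to do network I/O
        ch <- "successfully crawled " + url
    }

    func main() {
        urls := []string{"http://a.example/rss", "http://b.example/rss"}

        ch := make(chan string)
        for _, u := range urls {
            go fakeFetch(u, ch) // fan out: one worker per feed
        }

        // Fan in: one receive per worker keeps main alive until all report.
        for i := 0; i < len(urls); i++ {
            fmt.Println(<-ch)
        }
    }

Because the channel is unbuffered, each worker blocks on its send until the
collector receives it; that is harmless here since the loop drains exactly one
message per goroutine, and the same reasoning applies to the channel in Crawl.
A buffered channel (make(chan string, len(feeds))) would let workers finish
without waiting on the receiver. Note also that log.Fatal in Crawl exits the
process if the feed list cannot be loaded, so no goroutines are started on
that path.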