path: root/crawler/crawler.go
package crawler

import (
	"log"
	"net/http"
	"time"

	"neko/models/feed"
	"neko/models/item"

	"github.com/SlyMarbo/rss"
)

// Crawl fetches every stored feed concurrently and waits for each crawl
// goroutine to report its result before returning.
func Crawl() {

	ch := make(chan string)

	feeds, err := feed.All()
	if err != nil {
		log.Fatal(err)
	}
	// Fan out: one goroutine per feed.
	for _, f := range feeds {
		log.Printf("crawling %s", f.Url)
		go CrawlFeed(f, ch)
	}

	// Fan in: collect one status message per feed before returning.
	for i := 0; i < len(feeds); i++ {
		log.Println(<-ch)
	}
}

// CrawlFeed fetches and parses a single feed, stores its items, and reports a
// status message on ch.
//
// TODO: sanitize input on crawl
func CrawlFeed(f *feed.Feed, ch chan<- string) {
	c := &http.Client{
		// give up after 5 seconds
		Timeout: 5 * time.Second,
	}

	remote, err := rss.FetchByClient(f.Url, c)
	if err != nil {
		log.Print(err)
		ch <- "failed to fetch and parse " + f.Url
		return
	}

	// Refresh the stored title from the fetched feed.
	f.Title = remote.Title
	f.Update()

	// Store each item, preferring full content over the summary when the
	// feed provides both.
	for _, i := range remote.Items {
		log.Printf("storing item: %s", i.Title)
		var it item.Item
		it.Title = i.Title
		it.Url = i.Link
		it.Description = i.Content
		if it.Description == "" {
			it.Description = i.Summary
		}
		it.FeedId = f.Id
		it.Create()
	}
	ch <- "successfully crawled " + f.Url
}
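
// Usage sketch (an assumption about the surrounding project, not part of this
// file): Crawl is the package's entry point and would typically be wired up
// from a main package, e.g.
//
//	package main
//
//	import "neko/crawler"
//
//	func main() {
//		crawler.Crawl()
//	}
//
// Crawl blocks until every feed goroutine has sent its status message, so a
// caller can run it on a schedule (for example from cron or a ticker loop).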