package crawler

import (
	"log"
	"net/http"
	"time"

	"github.com/adammathes/neko/models/feed"
	"github.com/adammathes/neko/models/item"
	"github.com/mmcdole/gofeed"
)
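
// Crawl loads all feeds from the database, fetches each one
// concurrently, and logs the status reported by each crawl.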
func Crawl() {
	ch := make(chan string)

	feeds, err := feed.All()
	if err != nil {
		log.Fatal(err)
	}

	// fan out: crawl each feed in its own goroutine
	for _, f := range feeds {
		log.Printf("crawling %s", f.Url)
		go CrawlFeed(f, ch)
	}

	// fan in: wait for a status message from every crawl before returning
	for i := 0; i < len(feeds); i++ {
		log.Println(<-ch)
	}
}
/*
TODO: sanitize input on crawl
*/
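
// CrawlFeed fetches and parses a single feed, updates its title,
// stores its items, and reports a status string on ch.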
func CrawlFeed(f *feed.Feed, ch chan<- string) {
	c := &http.Client{
		// give up after 5 seconds
		Timeout: 5 * time.Second,
	}

	fp := gofeed.NewParser()
	fp.Client = c
	feed, err := fp.ParseURL(f.Url)
	if err != nil {
		log.Print(err)
		ch <- "failed to fetch and parse " + f.Url
		return
	}
	f.Title = feed.Title
	f.Update()

	for _, i := range feed.Items {
		log.Printf("storing item: %s", i.Title)

		var item item.Item
		item.Title = i.Title
		item.Url = i.Link
		item.Description = i.Description
		if len(i.Content) > len(item.Description) {
			item.Description = i.Content
		}
		// a lot of RSS 2.0 generated by WordPress and others puts the
		// full content in <content:encoded>; prefer it when it is longer
		e, ok := i.Extensions["content"]["encoded"]
		encoded := ""
		if ok && len(e) > 0 {
			encoded = e[0].Value
		}
		if len(encoded) > len(item.Description) {
			item.Description = encoded
		}
		// guard against items with no parseable publish date to avoid
		// a nil pointer dereference
		if i.PublishedParsed != nil {
			item.PublishDate = i.PublishedParsed.Format(time.RFC3339)
		}
		item.FeedId = f.Id

		err := item.Create()
		if err != nil {
			log.Println(err)
		}
	}
	ch <- "successfully crawled " + f.Url
}