aboutsummaryrefslogtreecommitdiffstats
path: root/internal
diff options
context:
space:
mode:
authorAdam Mathes <adam@adammathes.com>2026-02-14 08:58:38 -0800
committerAdam Mathes <adam@adammathes.com>2026-02-14 08:58:38 -0800
commite3c379d069ffa9661561d25cdbf2f5894a2f8ee8 (patch)
tree24d0e9f5610dd9c8f873c5b78e6bc1c88d32840a /internal
parent4b06155fbde91a1bef6361ef36efb28789861928 (diff)
downloadneko-e3c379d069ffa9661561d25cdbf2f5894a2f8ee8.tar.gz
neko-e3c379d069ffa9661561d25cdbf2f5894a2f8ee8.tar.bz2
neko-e3c379d069ffa9661561d25cdbf2f5894a2f8ee8.zip
Refactor: project structure, implement dependency injection, and align v2 UI with v1
Diffstat (limited to 'internal')
-rw-r--r--internal/crawler/crawler.go161
-rw-r--r--internal/crawler/crawler_test.go278
-rw-r--r--internal/crawler/integration_test.go67
-rw-r--r--internal/exporter/exporter.go61
-rw-r--r--internal/exporter/exporter_test.go111
-rw-r--r--internal/importer/importer.go89
-rw-r--r--internal/importer/importer_test.go149
-rw-r--r--internal/vlog/vlog.go25
-rw-r--r--internal/vlog/vlog_test.go78
9 files changed, 1019 insertions, 0 deletions
diff --git a/internal/crawler/crawler.go b/internal/crawler/crawler.go
new file mode 100644
index 0000000..10253d8
--- /dev/null
+++ b/internal/crawler/crawler.go
@@ -0,0 +1,161 @@
+package crawler
+
+import (
+ "io/ioutil"
+ "log"
+ "net/http"
+ "time"
+
+ "adammathes.com/neko/internal/vlog"
+ "adammathes.com/neko/models/feed"
+ "adammathes.com/neko/models/item"
+ "github.com/mmcdole/gofeed"
+)
+
+// MAX_CRAWLERS bounds the number of concurrent crawl worker goroutines.
+// NOTE(review): Go convention prefers MixedCaps (MaxCrawlers) over
+// ALL_CAPS, but renaming would change the exported identifier.
+const MAX_CRAWLERS = 5
+
+// Crawl loads every feed from the database, fans the feeds out to
+// MAX_CRAWLERS worker goroutines over a buffered jobs channel, then
+// waits for exactly one status string per feed before closing the
+// results channel.
+// NOTE(review): a feed.All() failure aborts the whole process via
+// log.Fatal; returning an error would be friendlier to callers. The
+// channel buffers are fixed at 100 — with more than 100 feeds the send
+// loop blocks until workers drain jobs, which is safe but worth knowing.
+func Crawl() {
+ crawlJobs := make(chan *feed.Feed, 100)
+ results := make(chan string, 100)
+
+ feeds, err := feed.All()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ for i := 0; i < MAX_CRAWLERS; i++ {
+ vlog.Printf("spawning crawl worker %d\n", i)
+ go CrawlWorker(crawlJobs, results)
+ }
+
+ for _, f := range feeds {
+ vlog.Printf("sending crawl job %s\n", f.Url)
+ crawlJobs <- f
+ }
+ close(crawlJobs)
+
+ // receiving one result per feed guarantees all jobs have finished
+ // before results is closed
+ for i := 0; i < len(feeds); i++ {
+ vlog.Println(<-results)
+ }
+ close(results)
+}
+
+// CrawlWorker consumes feeds from the jobs channel until it is closed,
+// crawling each one in turn. Exactly one status message per feed is
+// sent on results (from inside CrawlFeed), which Crawl relies on to
+// know when all work is done.
+func CrawlWorker(feeds <-chan *feed.Feed, results chan<- string) {
+
+ for f := range feeds {
+ vlog.Printf("crawl job received %s\n", f.Url)
+ CrawlFeed(f, results)
+ vlog.Printf("crawl job finished %s\n", f.Url)
+ }
+}
+
+/*
+GetFeedContent performs an HTTP GET of feedURL with a custom
+User-Agent header and a 5-second client timeout, returning the
+response body as a string. Any transport error, non-2xx status, or
+body-read error yields "" — callers cannot distinguish failure modes.
+
+NOTE(review): log.Fatalln on the http.NewRequest error kills the whole
+process for a malformed URL; returning "" like the other error paths
+would be more consistent. ioutil.ReadAll has been deprecated in favor
+of io.ReadAll since Go 1.16.
+*/
+func GetFeedContent(feedURL string) string {
+
+ // introduce delays for testing
+ // n := time.Duration(rand.Int63n(3))
+ // time.Sleep(n * time.Second)
+
+ c := &http.Client{
+ // give up after 5 seconds
+ Timeout: 5 * time.Second,
+ }
+
+ request, err := http.NewRequest("GET", feedURL, nil)
+ if err != nil {
+ log.Fatalln(err)
+ }
+
+ userAgent := "neko RSS Crawler +https://github.com/adammathes/neko"
+ request.Header.Set("User-Agent", userAgent)
+ resp, err := c.Do(request)
+
+ if err != nil {
+ return ""
+ }
+
+ // NOTE(review): resp is always non-nil when err is nil, so this
+ // guard is redundant; and the deferred assignment to err has no
+ // effect because err is not a named return value.
+ if resp != nil {
+ defer func() {
+ ce := resp.Body.Close()
+ if ce != nil {
+ err = ce
+ }
+ }()
+ }
+
+ // treat anything outside 2xx as a failed fetch
+ if resp.StatusCode < 200 || resp.StatusCode >= 300 {
+ return ""
+ }
+
+ bodyBytes, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return ""
+ }
+ return string(bodyBytes)
+}
+
+/*
+CrawlFeed downloads and parses a single feed, refreshes the feed's
+title/link metadata, stores each entry as an item, and sends exactly
+one human-readable status string on ch (success or parse failure) —
+the caller counts on this 1:1 contract.
+
+TODO: sanitize input on crawl
+*/
+func CrawlFeed(f *feed.Feed, ch chan<- string) {
+ c := &http.Client{
+ // give up after 5 seconds
+ Timeout: 5 * time.Second,
+ }
+
+ fp := gofeed.NewParser()
+ fp.Client = c
+
+ content := GetFeedContent(f.Url)
+ // NOTE(review): this local `feed` shadows the imported feed package
+ // inside the function; legal but easy to misread.
+ feed, err := fp.ParseString(content)
+ if err != nil {
+ vlog.Println(err)
+ ch <- "failed parse for " + f.Url + "\n"
+ return
+ }
+
+ // refresh stored feed metadata from the parsed document
+ f.Title = feed.Title
+ f.WebUrl = feed.Link
+ f.Update()
+
+ for _, i := range feed.Items {
+ vlog.Printf("storing item: %s\n", i.Link)
+ // NOTE(review): `item` likewise shadows the imported item package.
+ var item item.Item
+ item.Title = i.Title
+ item.Url = i.Link
+
+ // prefer the longest available body text as the description
+ item.Description = i.Description
+ if len(i.Content) > len(item.Description) {
+ item.Description = i.Content
+ }
+
+ // a lot of RSS2.0 generated by wordpress and others
+ // uses <content:encoded>
+ e, ok := i.Extensions["content"]["encoded"]
+ var encoded = ""
+ if ok {
+ encoded = e[0].Value
+ }
+ if len(encoded) > len(item.Description) {
+ item.Description = encoded
+ }
+
+ // fall back to crawl time when the entry carries no pubDate
+ if i.PublishedParsed != nil {
+ item.PublishDate = i.PublishedParsed.Format("2006-01-02 15:04:05")
+ } else {
+ item.PublishDate = time.Now().Format("2006-01-02 15:04:05")
+ }
+
+ item.FeedId = f.Id
+ // insert failures (e.g. duplicate URLs) are logged and skipped so
+ // one bad entry does not abort the rest of the feed
+ err := item.Create()
+ if err != nil {
+ vlog.Println(err)
+ }
+ // else {
+ // item.GetFullContent()
+ //}
+ }
+ ch <- "successfully crawled " + f.Url + "\n"
+}
diff --git a/internal/crawler/crawler_test.go b/internal/crawler/crawler_test.go
new file mode 100644
index 0000000..e0c4c6b
--- /dev/null
+++ b/internal/crawler/crawler_test.go
@@ -0,0 +1,278 @@
+package crawler
+
+import (
+ "log"
+ "net/http"
+ "net/http/httptest"
+ "strings"
+ "testing"
+
+ "adammathes.com/neko/config"
+ "adammathes.com/neko/models"
+ "adammathes.com/neko/models/feed"
+)
+
+// setupTestDB points the config at an in-memory SQLite database,
+// initializes the schema via models.InitDB, and registers a cleanup
+// that closes the shared handle when the test ends.
+func setupTestDB(t *testing.T) {
+ t.Helper()
+ config.Config.DBFile = ":memory:"
+ models.InitDB()
+ t.Cleanup(func() {
+ if models.DB != nil {
+ models.DB.Close()
+ }
+ })
+}
+
+// TestGetFeedContentSuccess verifies a 200 body is returned verbatim
+// and that the request carried a User-Agent header.
+func TestGetFeedContentSuccess(t *testing.T) {
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ua := r.Header.Get("User-Agent")
+ if ua == "" {
+ t.Error("Request should include User-Agent")
+ }
+ w.WriteHeader(200)
+ w.Write([]byte("<rss><channel><title>Test</title></channel></rss>"))
+ }))
+ defer ts.Close()
+
+ content := GetFeedContent(ts.URL)
+ if content == "" {
+ t.Error("GetFeedContent should return content for valid URL")
+ }
+ if content != "<rss><channel><title>Test</title></channel></rss>" {
+ t.Errorf("Unexpected content: %q", content)
+ }
+}
+
+// TestGetFeedContentBadURL expects "" for an unreachable URL
+// (port 99999 is out of range, so the request fails fast).
+func TestGetFeedContentBadURL(t *testing.T) {
+ content := GetFeedContent("http://invalid.invalid.invalid:99999/feed")
+ if content != "" {
+ t.Errorf("GetFeedContent should return empty string for bad URL, got %q", content)
+ }
+}
+
+// TestGetFeedContent404 expects "" for a 404 status (non-2xx path).
+func TestGetFeedContent404(t *testing.T) {
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(404)
+ }))
+ defer ts.Close()
+
+ content := GetFeedContent(ts.URL)
+ if content != "" {
+ t.Errorf("GetFeedContent should return empty for 404, got %q", content)
+ }
+}
+
+// TestGetFeedContent500 expects "" for a 500 status (non-2xx path).
+func TestGetFeedContent500(t *testing.T) {
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(500)
+ }))
+ defer ts.Close()
+
+ content := GetFeedContent(ts.URL)
+ if content != "" {
+ t.Errorf("GetFeedContent should return empty for 500, got %q", content)
+ }
+}
+
+// TestGetFeedContentUserAgent pins the exact crawler User-Agent string.
+func TestGetFeedContentUserAgent(t *testing.T) {
+ var receivedUA string
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ receivedUA = r.Header.Get("User-Agent")
+ w.WriteHeader(200)
+ w.Write([]byte("ok"))
+ }))
+ defer ts.Close()
+
+ GetFeedContent(ts.URL)
+ expected := "neko RSS Crawler +https://github.com/adammathes/neko"
+ if receivedUA != expected {
+ t.Errorf("Expected UA %q, got %q", expected, receivedUA)
+ }
+}
+
+// TestCrawlFeedWithTestServer crawls a two-item RSS feed served by
+// httptest and checks both items land in the item table.
+func TestCrawlFeedWithTestServer(t *testing.T) {
+ setupTestDB(t)
+
+ rssContent := `<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0">
+ <channel>
+ <title>Test Feed</title>
+ <link>https://example.com</link>
+ <item>
+ <title>Article 1</title>
+ <link>https://example.com/article1</link>
+ <description>First article</description>
+ <pubDate>Mon, 01 Jan 2024 00:00:00 GMT</pubDate>
+ </item>
+ <item>
+ <title>Article 2</title>
+ <link>https://example.com/article2</link>
+ <description>Second article</description>
+ <pubDate>Tue, 02 Jan 2024 00:00:00 GMT</pubDate>
+ </item>
+ </channel>
+</rss>`
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "application/rss+xml")
+ w.WriteHeader(200)
+ w.Write([]byte(rssContent))
+ }))
+ defer ts.Close()
+
+ // Create a feed pointing to the test server
+ f := &feed.Feed{Url: ts.URL, Title: "Test"}
+ f.Create()
+
+ ch := make(chan string, 1)
+ CrawlFeed(f, ch)
+ result := <-ch
+
+ if result == "" {
+ t.Error("CrawlFeed should send a result")
+ }
+
+ // Verify items were created
+ var count int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&count)
+ if count != 2 {
+ t.Errorf("Expected 2 items, got %d", count)
+ }
+}
+
+// TestCrawlFeedBadContent serves non-XML and checks CrawlFeed still
+// emits its one status message (the failed-parse path).
+func TestCrawlFeedBadContent(t *testing.T) {
+ setupTestDB(t)
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(200)
+ w.Write([]byte("not xml at all"))
+ }))
+ defer ts.Close()
+
+ f := &feed.Feed{Url: ts.URL, Title: "Bad"}
+ f.Create()
+
+ ch := make(chan string, 1)
+ CrawlFeed(f, ch)
+ result := <-ch
+
+ if result == "" {
+ t.Error("CrawlFeed should send a result even on failure")
+ }
+}
+
+// TestCrawlWorker runs the worker loop synchronously over a closed
+// one-element channel and checks a result is produced.
+func TestCrawlWorker(t *testing.T) {
+ setupTestDB(t)
+
+ rssContent := `<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0">
+ <channel>
+ <title>Worker Feed</title>
+ <link>https://example.com</link>
+ <item>
+ <title>Worker Article</title>
+ <link>https://example.com/worker-article</link>
+ <description>An article</description>
+ </item>
+ </channel>
+</rss>`
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(200)
+ w.Write([]byte(rssContent))
+ }))
+ defer ts.Close()
+
+ f := &feed.Feed{Url: ts.URL, Title: "Worker Test"}
+ f.Create()
+
+ feeds := make(chan *feed.Feed, 1)
+ results := make(chan string, 1)
+
+ feeds <- f
+ close(feeds)
+
+ CrawlWorker(feeds, results)
+ result := <-results
+
+ if result == "" {
+ t.Error("CrawlWorker should produce a result")
+ }
+}
+
+// TestCrawl exercises the full Crawl entry point (worker pool
+// included) against a single mock feed and checks one item is stored.
+func TestCrawl(t *testing.T) {
+ setupTestDB(t)
+
+ rssContent := `<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0">
+ <channel>
+ <title>Crawl Feed</title>
+ <link>https://example.com</link>
+ <item>
+ <title>Crawl Article</title>
+ <link>https://example.com/crawl-article</link>
+ <description>Article for crawl test</description>
+ </item>
+ </channel>
+</rss>`
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(200)
+ w.Write([]byte(rssContent))
+ }))
+ defer ts.Close()
+
+ f := &feed.Feed{Url: ts.URL, Title: "Full Crawl"}
+ f.Create()
+
+ // Should not panic
+ Crawl()
+
+ var count int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&count)
+ if count != 1 {
+ t.Errorf("Expected 1 item after crawl, got %d", count)
+ }
+}
+
+// TestCrawlFeedWithExtensions verifies the <content:encoded> extension
+// wins over a shorter <description> when storing an item.
+func TestCrawlFeedWithExtensions(t *testing.T) {
+ setupTestDB(t)
+
+ rssContent := `<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
+ <channel>
+ <title>Extension Feed</title>
+ <item>
+ <title>Extension Article</title>
+ <link>https://example.com/ext</link>
+ <description>Short description</description>
+ <content:encoded><![CDATA[Much longer content that should be used as description]]></content:encoded>
+ </item>
+ </channel>
+</rss>`
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(200)
+ w.Write([]byte(rssContent))
+ }))
+ defer ts.Close()
+
+ f := &feed.Feed{Url: ts.URL, Title: "Extension Test"}
+ f.Create()
+
+ ch := make(chan string, 1)
+ CrawlFeed(f, ch)
+ <-ch
+
+ var itemTitle, itemDesc string
+ err := models.DB.QueryRow("SELECT title, description FROM item WHERE feed_id = ?", f.Id).Scan(&itemTitle, &itemDesc)
+ // NOTE(review): log.Fatal aborts the entire test binary on a query
+ // failure; t.Fatal(err) is the idiomatic choice inside a test.
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ if itemTitle != "Extension Article" {
+ t.Errorf("Expected title 'Extension Article', got %q", itemTitle)
+ }
+ if !strings.Contains(itemDesc, "Much longer content") {
+ t.Errorf("Expected description to contain encoded content, got %q", itemDesc)
+ }
+}
diff --git a/internal/crawler/integration_test.go b/internal/crawler/integration_test.go
new file mode 100644
index 0000000..633b60f
--- /dev/null
+++ b/internal/crawler/integration_test.go
@@ -0,0 +1,67 @@
+package crawler
+
+import (
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "testing"
+
+ "adammathes.com/neko/models/feed"
+ "adammathes.com/neko/models/item"
+)
+
+// TestCrawlIntegration runs the feed-create -> CrawlFeed -> item.Filter
+// pipeline end to end against a mock RSS server and checks the stored
+// item's title round-trips.
+func TestCrawlIntegration(t *testing.T) {
+ setupTestDB(t)
+
+ // Mock RSS feed server
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "application/rss+xml")
+ // NOTE(review): debug write to stdout; consider t.Log (needs the
+ // *testing.T captured) or removing it to keep test output clean.
+ os.Stdout.Write([]byte("serving mock rss\n"))
+ fmt.Fprint(w, `<?xml version="1.0" encoding="UTF-8" ?>
+<rss version="2.0">
+<channel>
+ <title>Test Feed</title>
+ <link>http://example.com/</link>
+ <description>Test Description</description>
+ <item>
+ <title>Test Item 1</title>
+ <link>http://example.com/item1</link>
+ <description>Item 1 Description</description>
+ <pubDate>Mon, 01 Jan 2024 00:00:00 +0000</pubDate>
+ </item>
+</channel>
+</rss>`)
+ }))
+ defer ts.Close()
+
+ // Add the feed
+ f := &feed.Feed{Url: ts.URL}
+ err := f.Create()
+ if err != nil {
+ t.Fatalf("Failed to create feed: %v", err)
+ }
+
+ // Crawl
+ ch := make(chan string, 1)
+ CrawlFeed(f, ch)
+
+ res := <-ch
+ if res == "" {
+ t.Fatal("CrawlFeed returned empty result")
+ }
+
+ // Verify items were stored
+ items, err := item.Filter(0, f.Id, "", false, false, 0, "")
+ if err != nil {
+ t.Fatalf("Failed to filter items: %v", err)
+ }
+
+ if len(items) != 1 {
+ t.Fatalf("Expected 1 item, got %d", len(items))
+ }
+
+ if items[0].Title != "Test Item 1" {
+ t.Errorf("Expected 'Test Item 1', got %q", items[0].Title)
+ }
+}
diff --git a/internal/exporter/exporter.go b/internal/exporter/exporter.go
new file mode 100644
index 0000000..9172fec
--- /dev/null
+++ b/internal/exporter/exporter.go
@@ -0,0 +1,61 @@
+package exporter
+
+import (
+ "adammathes.com/neko/models/feed"
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "fmt"
+ "html/template"
+)
+
+// ExportFeeds renders all feeds in one of four formats — "text" (one
+// URL per line), "opml", "json", or "html" — and returns the result as
+// a string. An unrecognized format yields "".
+// NOTE(review): a database error panics rather than returning an
+// error; the xml/json Marshal errors are silently discarded; and the
+// string is built with += in a loop (strings.Builder would avoid the
+// quadratic copies, though feed counts are small in practice).
+func ExportFeeds(format string) string {
+ feeds, err := feed.All()
+ if err != nil {
+ panic(err)
+ }
+
+ s := ""
+ switch format {
+ case "text":
+ for _, f := range feeds {
+ s = s + fmt.Sprintf("%s\n", f.Url)
+ }
+
+ case "opml":
+ s = s + fmt.Sprintf(`<opml version="2.0"><head><title>neko feeds</title></head><body>`)
+ s = s + fmt.Sprintf("\n")
+ for _, f := range feeds {
+ b, _ := xml.Marshal(f)
+ s = s + fmt.Sprintf("%s\n", string(b))
+ }
+ s = s + fmt.Sprintf(`</body></opml>`)
+
+ case "json":
+ js, _ := json.Marshal(feeds)
+ s = fmt.Sprintf("%s\n", js)
+
+ case "html":
+ htmlTemplateString := `<html>
+<head>
+<title>feeds</title>
+</head>
+<body>
+<ul>
+{{ range . }}
+<li><a href="{{.WebUrl}}">{{.Title}}</a> | <a href="{{.Url}}">xml</a></li>
+{{ end }}
+</ul>
+</body>
+</html>`
+ var bts bytes.Buffer
+ // NOTE(review): the Parse error is overwritten by the Execute
+ // error on the next line; if Parse ever failed, htmlTemplate
+ // would be nil and Execute would panic before the err check.
+ htmlTemplate, err := template.New("feeds").Parse(htmlTemplateString)
+ err = htmlTemplate.Execute(&bts, feeds)
+ if err != nil {
+ panic(err)
+ }
+ s = bts.String()
+ }
+
+ return s
+}
diff --git a/internal/exporter/exporter_test.go b/internal/exporter/exporter_test.go
new file mode 100644
index 0000000..d4cc994
--- /dev/null
+++ b/internal/exporter/exporter_test.go
@@ -0,0 +1,111 @@
+package exporter
+
+import (
+ "encoding/json"
+ "strings"
+ "testing"
+
+ "adammathes.com/neko/config"
+ "adammathes.com/neko/models"
+)
+
+// setupTestDB initializes an in-memory SQLite database for exporter
+// tests and closes it on cleanup.
+func setupTestDB(t *testing.T) {
+ t.Helper()
+ config.Config.DBFile = ":memory:"
+ models.InitDB()
+ t.Cleanup(func() {
+ if models.DB != nil {
+ models.DB.Close()
+ }
+ })
+}
+
+// seedFeeds inserts two known feeds ("Alpha Feed"/tech and
+// "Beta Feed"/news) directly via SQL for the export tests to read.
+func seedFeeds(t *testing.T) {
+ t.Helper()
+ _, err := models.DB.Exec("INSERT INTO feed(url, web_url, title, category) VALUES(?, ?, ?, ?)",
+ "https://a.com/feed", "https://a.com", "Alpha Feed", "tech")
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = models.DB.Exec("INSERT INTO feed(url, web_url, title, category) VALUES(?, ?, ?, ?)",
+ "https://b.com/feed", "https://b.com", "Beta Feed", "news")
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+// TestExportText checks both seeded feed URLs appear in text output.
+func TestExportText(t *testing.T) {
+ setupTestDB(t)
+ seedFeeds(t)
+
+ result := ExportFeeds("text")
+ if !strings.Contains(result, "https://a.com/feed") {
+ t.Error("text export should contain feed URL a")
+ }
+ if !strings.Contains(result, "https://b.com/feed") {
+ t.Error("text export should contain feed URL b")
+ }
+}
+
+// TestExportJSON checks the JSON output parses and holds both feeds.
+func TestExportJSON(t *testing.T) {
+ setupTestDB(t)
+ seedFeeds(t)
+
+ result := ExportFeeds("json")
+ var feeds []interface{}
+ err := json.Unmarshal([]byte(result), &feeds)
+ if err != nil {
+ t.Fatalf("JSON export should be valid JSON: %v", err)
+ }
+ if len(feeds) != 2 {
+ t.Errorf("JSON export should contain 2 feeds, got %d", len(feeds))
+ }
+}
+
+// TestExportOPML checks the OPML envelope and both feed titles.
+// NOTE(review): only substring checks — the output is not validated as
+// well-formed XML.
+func TestExportOPML(t *testing.T) {
+ setupTestDB(t)
+ seedFeeds(t)
+
+ result := ExportFeeds("opml")
+ if !strings.Contains(result, "<opml") {
+ t.Error("OPML export should contain opml tag")
+ }
+ if !strings.Contains(result, "Alpha Feed") || !strings.Contains(result, "Beta Feed") {
+ t.Error("OPML export should contain feed titles")
+ }
+ if !strings.Contains(result, "</opml>") {
+ t.Error("OPML export should close opml tag")
+ }
+}
+
+// TestExportHTML checks the HTML wrapper and a seeded title render.
+func TestExportHTML(t *testing.T) {
+ setupTestDB(t)
+ seedFeeds(t)
+
+ result := ExportFeeds("html")
+ if !strings.Contains(result, "<html>") {
+ t.Error("HTML export should contain html tag")
+ }
+ if !strings.Contains(result, "Alpha Feed") {
+ t.Error("HTML export should contain feed title")
+ }
+}
+
+// TestExportUnknownFormat pins the ""-for-unknown-format contract.
+func TestExportUnknownFormat(t *testing.T) {
+ setupTestDB(t)
+ seedFeeds(t)
+
+ result := ExportFeeds("unknown")
+ if result != "" {
+ t.Errorf("Unknown format should return empty string, got %q", result)
+ }
+}
+
+// TestExportEmpty checks text export of an empty database is "".
+func TestExportEmpty(t *testing.T) {
+ setupTestDB(t)
+
+ result := ExportFeeds("text")
+ if result != "" {
+ t.Errorf("Export with no feeds should be empty, got %q", result)
+ }
+}
diff --git a/internal/importer/importer.go b/internal/importer/importer.go
new file mode 100644
index 0000000..73a2cd8
--- /dev/null
+++ b/internal/importer/importer.go
@@ -0,0 +1,89 @@
+package importer
+
+import (
+ // "bufio"
+ "encoding/json"
+ //"fmt"
+ "io"
+ "log"
+ "os"
+
+ "adammathes.com/neko/models/feed"
+ "adammathes.com/neko/models/item"
+)
+
+// IItem mirrors one record of the legacy JSON export this importer
+// consumes (one JSON object per item, concatenated in the file).
+// NOTE(review): ReadState and Starred are decoded here but never
+// copied into the created item by InsertIItem — confirm whether that
+// loss is intentional.
+type IItem struct {
+ Title string `json:"title"`
+ Url string `json:"url"`
+ Description string `json:"description"`
+ ReadState bool `json:"read"`
+ Starred bool `json:"starred"`
+ Date *IDate `json:"date"`
+ Feed *IFeed `json:"feed"`
+}
+
+// IFeed is the embedded feed reference inside an exported item.
+type IFeed struct {
+ Url string `json:"url"`
+ Title string `json:"title"`
+ WebUrl string `json:"web_url"`
+}
+
+// IDate wraps the MongoDB-style {"$date": "..."} timestamp object.
+type IDate struct {
+ Date string `json:"$date"`
+}
+
+// ImportJSON streams a file of concatenated JSON item objects (not a
+// JSON array) through json.Decoder and inserts each one. A decode
+// error aborts the import and is returned; a per-item insert failure
+// is only logged so the rest of the file still imports.
+func ImportJSON(filename string) error {
+
+ f, err := os.Open(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ dec := json.NewDecoder(f)
+ for {
+ var ii IItem
+ if err := dec.Decode(&ii); err == io.EOF {
+ break
+ } else if err != nil {
+ return err
+ } else {
+ err := InsertIItem(&ii)
+ if err != nil {
+ log.Println(err)
+ }
+ }
+ }
+ return nil
+}
+
+// InsertIItem stores one imported item, looking its feed up by URL and
+// creating the feed on the fly if it does not exist yet. Items with no
+// feed reference are silently skipped (nil error).
+// NOTE(review): ii.ReadState and ii.Starred are never copied onto the
+// item, so read/star state is lost on import; and the "inserted" line
+// is logged even when i.Create() returns an error.
+func InsertIItem(ii *IItem) error {
+ var f feed.Feed
+
+ if ii.Feed == nil {
+ return nil
+ }
+ // a lookup failure is treated as "feed missing" -> create it
+ err := f.ByUrl(ii.Feed.Url)
+ if err != nil {
+ f.Url = ii.Feed.Url
+ f.Title = ii.Feed.Title
+ err = f.Create()
+ if err != nil {
+ return err
+ }
+ }
+
+ var i item.Item
+ i.FeedId = f.Id
+ i.Title = ii.Title
+ i.Url = ii.Url
+ i.Description = ii.Description
+
+ if ii.Date != nil {
+ i.PublishDate = ii.Date.Date
+ }
+
+ err = i.Create()
+ log.Printf("inserted %s\n", i.Url)
+ return err
+}
diff --git a/internal/importer/importer_test.go b/internal/importer/importer_test.go
new file mode 100644
index 0000000..59f06f1
--- /dev/null
+++ b/internal/importer/importer_test.go
@@ -0,0 +1,149 @@
+package importer
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "adammathes.com/neko/config"
+ "adammathes.com/neko/models"
+)
+
+// setupTestDB uses an on-disk SQLite file in a per-test temp dir
+// (unlike the other packages' ":memory:" setup) and closes it on
+// cleanup.
+func setupTestDB(t *testing.T) {
+ t.Helper()
+ config.Config.DBFile = filepath.Join(t.TempDir(), "test.db")
+ models.InitDB()
+ t.Cleanup(func() {
+ if models.DB != nil {
+ models.DB.Close()
+ }
+ })
+}
+
+// TestInsertIItem checks that inserting an item with an unseen feed
+// creates both the feed and the item.
+// NOTE(review): the InsertIItem error return is ignored here and in
+// the tests below; asserting on it would catch silent insert failures.
+func TestInsertIItem(t *testing.T) {
+ setupTestDB(t)
+
+ ii := &IItem{
+ Title: "Test Article",
+ Url: "https://example.com/article",
+ Description: "A test article description",
+ ReadState: false,
+ Starred: true,
+ Date: &IDate{Date: "2024-01-15 10:00:00"},
+ Feed: &IFeed{
+ Url: "https://example.com/feed",
+ Title: "Example Feed",
+ },
+ }
+
+ InsertIItem(ii)
+
+ // Verify the feed was created
+ var feedCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM feed").Scan(&feedCount)
+ if feedCount != 1 {
+ t.Errorf("Expected 1 feed, got %d", feedCount)
+ }
+
+ // Verify the item was created
+ var itemCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&itemCount)
+ if itemCount != 1 {
+ t.Errorf("Expected 1 item, got %d", itemCount)
+ }
+}
+
+// TestInsertIItemNilFeed checks the nil-feed skip path stores nothing.
+func TestInsertIItemNilFeed(t *testing.T) {
+ setupTestDB(t)
+
+ ii := &IItem{
+ Title: "No Feed Item",
+ Url: "https://example.com/nofeed",
+ Feed: nil,
+ }
+
+ // Should not panic
+ InsertIItem(ii)
+
+ var itemCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&itemCount)
+ if itemCount != 0 {
+ t.Errorf("Expected 0 items (nil feed should be skipped), got %d", itemCount)
+ }
+}
+
+// TestInsertIItemExistingFeed checks an existing feed is reused (no
+// duplicate feed row) when the URL already matches.
+func TestInsertIItemExistingFeed(t *testing.T) {
+ setupTestDB(t)
+
+ // Insert feed first
+ models.DB.Exec("INSERT INTO feed(url, title) VALUES(?, ?)", "https://example.com/feed", "Existing Feed")
+
+ ii := &IItem{
+ Title: "New Article",
+ Url: "https://example.com/new-article",
+ Description: "New article desc",
+ Date: &IDate{Date: "2024-01-15"},
+ Feed: &IFeed{
+ Url: "https://example.com/feed",
+ Title: "Existing Feed",
+ },
+ }
+
+ InsertIItem(ii)
+
+ // Should still be just 1 feed
+ var feedCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM feed").Scan(&feedCount)
+ if feedCount != 1 {
+ t.Errorf("Expected 1 feed (reuse existing), got %d", feedCount)
+ }
+}
+
+// TestImportJSON imports two newline-separated JSON objects sharing
+// one feed and checks 2 items / 1 feed result.
+func TestImportJSON(t *testing.T) {
+ setupTestDB(t)
+
+ dir := t.TempDir()
+ jsonFile := filepath.Join(dir, "import.json")
+
+ content := `{"title":"Article 1","url":"https://example.com/1","description":"desc1","read":false,"starred":false,"date":{"$date":"2024-01-01"},"feed":{"url":"https://example.com/feed","title":"Feed 1"}}
+{"title":"Article 2","url":"https://example.com/2","description":"desc2","read":true,"starred":true,"date":{"$date":"2024-01-02"},"feed":{"url":"https://example.com/feed","title":"Feed 1"}}`
+
+ err := os.WriteFile(jsonFile, []byte(content), 0644)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ ImportJSON(jsonFile)
+
+ var itemCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&itemCount)
+ if itemCount != 2 {
+ t.Errorf("Expected 2 items after import, got %d", itemCount)
+ }
+
+ var feedCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM feed").Scan(&feedCount)
+ if feedCount != 1 {
+ t.Errorf("Expected 1 feed after import, got %d", feedCount)
+ }
+}
+
+// TestImportJSONInvalid expects a decode error for non-JSON input.
+func TestImportJSONInvalid(t *testing.T) {
+ setupTestDB(t)
+ dir := t.TempDir()
+ jsonFile := filepath.Join(dir, "invalid.json")
+ os.WriteFile(jsonFile, []byte("not json"), 0644)
+
+ err := ImportJSON(jsonFile)
+ if err == nil {
+ t.Error("ImportJSON should error on invalid JSON")
+ }
+}
+
+// TestImportJSONNonexistent expects the os.Open error to propagate.
+func TestImportJSONNonexistent(t *testing.T) {
+ setupTestDB(t)
+ err := ImportJSON("/nonexistent/file.json")
+ if err == nil {
+ t.Error("ImportJSON should error on nonexistent file")
+ }
+}
diff --git a/internal/vlog/vlog.go b/internal/vlog/vlog.go
new file mode 100644
index 0000000..ab48478
--- /dev/null
+++ b/internal/vlog/vlog.go
@@ -0,0 +1,25 @@
+// vlog -- verbose logger -- wraps log functions and only performs them if "verbose"
+package vlog
+
+import (
+ "fmt"
+)
+
+// VERBOSE gates all output from this package; callers flip it at
+// startup (e.g. from a -v flag).
+// NOTE(review): mutable package-level state — fine for a tiny logger,
+// but not goroutine-safe if toggled while workers are printing.
+var VERBOSE bool
+
+// init is redundant: the zero value of bool is already false.
+// NOTE(review): `VERBOSE=false` is not gofmt-clean (gofmt writes
+// `VERBOSE = false`).
+func init() {
+ VERBOSE=false
+}
+
+// Printf forwards to fmt.Printf only when VERBOSE is set.
+func Printf(format string, v ...interface{}) {
+ if VERBOSE {
+ fmt.Printf(format, v...)
+ }
+}
+
+// Println forwards to fmt.Println only when VERBOSE is set.
+func Println(v ...interface{}) {
+ if VERBOSE {
+ fmt.Println(v...)
+ }
+}
+
diff --git a/internal/vlog/vlog_test.go b/internal/vlog/vlog_test.go
new file mode 100644
index 0000000..9def0f0
--- /dev/null
+++ b/internal/vlog/vlog_test.go
@@ -0,0 +1,78 @@
+package vlog
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "testing"
+)
+
+// captureStdout swaps os.Stdout for the write end of a pipe while f
+// runs, then restores it and returns everything f printed.
+// NOTE(review): the pipe is only drained after f returns, so output
+// larger than the OS pipe buffer (~64KB) would block f — fine for
+// these tiny test prints, not a general-purpose helper.
+func captureStdout(f func()) string {
+ old := os.Stdout
+ r, w, _ := os.Pipe()
+ os.Stdout = w
+
+ f()
+
+ w.Close()
+ os.Stdout = old
+
+ var buf bytes.Buffer
+ buf.ReadFrom(r)
+ return buf.String()
+}
+
+// TestPrintfVerbose checks Printf emits formatted output when VERBOSE
+// is on, restoring the flag via defer.
+func TestPrintfVerbose(t *testing.T) {
+ VERBOSE = true
+ defer func() { VERBOSE = false }()
+
+ output := captureStdout(func() {
+ Printf("hello %s", "world")
+ })
+ expected := fmt.Sprintf("hello %s", "world")
+ if output != expected {
+ t.Errorf("expected %q, got %q", expected, output)
+ }
+}
+
+// TestPrintfSilent checks Printf emits nothing when VERBOSE is off.
+func TestPrintfSilent(t *testing.T) {
+ VERBOSE = false
+
+ output := captureStdout(func() {
+ Printf("hello %s", "world")
+ })
+ if output != "" {
+ t.Errorf("expected empty output when not verbose, got %q", output)
+ }
+}
+
+// TestPrintlnVerbose checks Println matches fmt.Sprintln exactly
+// (spaces between args, trailing newline) when VERBOSE is on.
+func TestPrintlnVerbose(t *testing.T) {
+ VERBOSE = true
+ defer func() { VERBOSE = false }()
+
+ output := captureStdout(func() {
+ Println("hello", "world")
+ })
+ expected := fmt.Sprintln("hello", "world")
+ if output != expected {
+ t.Errorf("expected %q, got %q", expected, output)
+ }
+}
+
+// TestPrintlnSilent checks Println emits nothing when VERBOSE is off.
+func TestPrintlnSilent(t *testing.T) {
+ VERBOSE = false
+
+ output := captureStdout(func() {
+ Println("hello", "world")
+ })
+ if output != "" {
+ t.Errorf("expected empty output when not verbose, got %q", output)
+ }
+}
+
+// TestInit documents the package default (false) — though earlier
+// tests also reset VERBOSE, so this mainly guards test ordering.
+func TestInit(t *testing.T) {
+ // init() sets VERBOSE to false
+ if VERBOSE != false {
+ t.Error("VERBOSE should default to false")
+ }
+}