author    Adam Mathes <adam@adammathes.com>    2026-02-12 19:55:05 -0800
committer Adam Mathes <adam@adammathes.com>    2026-02-12 19:55:05 -0800
commit    16186a344a7b61633cb7342aac37ac56ad83d261 (patch)
tree      739556a9dc80457d072a6f3ab1db4226fa25a9f5 /importer
parent    39ed5fcfe9327ab4eb81c4863d9e6353f08f6c07 (diff)
Add comprehensive test suite — 81% cross-package coverage
Bug fixes:
- config: remove unused log import
- item: fix Printf format %d -> %t for boolean ReadState
- util: update stale config.Read -> config.Init, remove config.Config.DBServer

Test files added:
- config/config_test.go: Init, readConfig, addDefaults (100%)
- vlog/vlog_test.go: Printf, Println verbose/silent (100%)
- models/db_test.go: InitDB tests
- models/feed/feed_test.go: CRUD, filter, Categories, NewFeed, ResolveFeedURL (87%)
- models/item/item_test.go: CRUD, Filter with category/search/starred, rewriteImages (71%)
- exporter/exporter_test.go: all export formats (91%)
- importer/importer_test.go: InsertIItem, ImportJSON (90%)
- crawler/crawler_test.go: GetFeedContent, CrawlFeed, CrawlWorker, Crawl (89%)
- web/web_test.go: auth, login/logout, stream, item, feed, category, export, crawl, imageProxy handlers (77%)

Remaining 0% functions require live HTTP, rice.MustFindBox, or the main entry point and can't be unit tested without refactoring (see tickets NK-gqkh96, NK-6q9nyg).
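
For context on the item fix: logging a boolean with the %d verb is flagged by go vet and renders as noise at runtime. The actual call site isn't part of this diff, so the snippet below is a hypothetical reconstruction of the shape of that fix:

    package main

    import "fmt"

    func main() {
        read := true // stands in for an item's boolean ReadState field

        // Before: %d expects an integer; go vet flags the mismatch and
        // the output renders as %!d(bool=true) instead of a value.
        fmt.Printf("read state: %d\n", read)

        // After: %t is the verb for booleans and prints true/false.
        fmt.Printf("read state: %t\n", read)
    }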
Diffstat (limited to 'importer')
-rw-r--r--  importer/importer_test.go  129
1 file changed, 129 insertions(+), 0 deletions(-)
diff --git a/importer/importer_test.go b/importer/importer_test.go
new file mode 100644
index 0000000..00ab822
--- /dev/null
+++ b/importer/importer_test.go
@@ -0,0 +1,129 @@
+package importer
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "adammathes.com/neko/config"
+ "adammathes.com/neko/models"
+)
+
+func setupTestDB(t *testing.T) {
+ t.Helper()
+ config.Config.DBFile = ":memory:"
+ models.InitDB()
+ t.Cleanup(func() {
+ if models.DB != nil {
+ models.DB.Close()
+ }
+ })
+}
+
+func TestInsertIItem(t *testing.T) {
+ setupTestDB(t)
+
+ ii := &IItem{
+ Title: "Test Article",
+ Url: "https://example.com/article",
+ Description: "A test article description",
+ ReadState: false,
+ Starred: true,
+ Date: &IDate{Date: "2024-01-15 10:00:00"},
+ Feed: &IFeed{
+ Url: "https://example.com/feed",
+ Title: "Example Feed",
+ },
+ }
+
+ InsertIItem(ii)
+
+ // Verify the feed was created
+ var feedCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM feed").Scan(&feedCount)
+ if feedCount != 1 {
+ t.Errorf("Expected 1 feed, got %d", feedCount)
+ }
+
+ // Verify the item was created
+ var itemCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&itemCount)
+ if itemCount != 1 {
+ t.Errorf("Expected 1 item, got %d", itemCount)
+ }
+}
+
+func TestInsertIItemNilFeed(t *testing.T) {
+ setupTestDB(t)
+
+ ii := &IItem{
+ Title: "No Feed Item",
+ Url: "https://example.com/nofeed",
+ Feed: nil,
+ }
+
+ // Should not panic
+ InsertIItem(ii)
+
+ var itemCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&itemCount)
+ if itemCount != 0 {
+ t.Errorf("Expected 0 items (nil feed should be skipped), got %d", itemCount)
+ }
+}
+
+func TestInsertIItemExistingFeed(t *testing.T) {
+ setupTestDB(t)
+
+ // Insert feed first
+ models.DB.Exec("INSERT INTO feed(url, title) VALUES(?, ?)", "https://example.com/feed", "Existing Feed")
+
+ ii := &IItem{
+ Title: "New Article",
+ Url: "https://example.com/new-article",
+ Description: "New article desc",
+ Date: &IDate{Date: "2024-01-15"},
+ Feed: &IFeed{
+ Url: "https://example.com/feed",
+ Title: "Existing Feed",
+ },
+ }
+
+ InsertIItem(ii)
+
+ // Should still be just 1 feed
+ var feedCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM feed").Scan(&feedCount)
+ if feedCount != 1 {
+ t.Errorf("Expected 1 feed (reuse existing), got %d", feedCount)
+ }
+}
+
+func TestImportJSON(t *testing.T) {
+ setupTestDB(t)
+
+ dir := t.TempDir()
+ jsonFile := filepath.Join(dir, "import.json")
+
+ content := `{"title":"Article 1","url":"https://example.com/1","description":"desc1","read":false,"starred":false,"date":{"$date":"2024-01-01"},"feed":{"url":"https://example.com/feed","title":"Feed 1"}}
+{"title":"Article 2","url":"https://example.com/2","description":"desc2","read":true,"starred":true,"date":{"$date":"2024-01-02"},"feed":{"url":"https://example.com/feed","title":"Feed 1"}}`
+
+ err := os.WriteFile(jsonFile, []byte(content), 0644)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ ImportJSON(jsonFile)
+
+ var itemCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM item").Scan(&itemCount)
+ if itemCount != 2 {
+ t.Errorf("Expected 2 items after import, got %d", itemCount)
+ }
+
+ var feedCount int
+ models.DB.QueryRow("SELECT COUNT(*) FROM feed").Scan(&feedCount)
+ if feedCount != 1 {
+ t.Errorf("Expected 1 feed after import, got %d", feedCount)
+ }
+}
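
The tests above pin down the importer's public surface: InsertIItem takes a single parsed item, and ImportJSON reads newline-delimited JSON (one object per line, as in the fixture) and inserts each record. The types the tests construct can be inferred from the struct literals and JSON keys in this diff; a reconstruction (an inference from the tests, not the actual importer source) would look roughly like:

    // IDate wraps a Mongo-style extended-JSON date, matching the "$date" key
    // in the fixture.
    type IDate struct {
        Date string `json:"$date"`
    }

    // IFeed carries the minimal feed identity needed to find or create a feed row.
    type IFeed struct {
        Url   string `json:"url"`
        Title string `json:"title"`
    }

    // IItem mirrors one imported record; Feed may be nil, which
    // TestInsertIItemNilFeed expects to be skipped without panicking.
    type IItem struct {
        Title       string `json:"title"`
        Url         string `json:"url"`
        Description string `json:"description"`
        ReadState   bool   `json:"read"`
        Starred     bool   `json:"starred"`
        Date        *IDate `json:"date"`
        Feed        *IFeed `json:"feed"`
    }

To run just this package's tests with the coverage figure quoted in the commit message:

    go test ./importer/ -cover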