From d4caf45b2b9ea6a3276de792cf6f73085e66b1ae Mon Sep 17 00:00:00 2001 From: Claude Date: Mon, 16 Feb 2026 18:15:47 +0000 Subject: Add performance benchmarks, stress tests, and frontend perf tests Go benchmarks cover item CRUD/filter/sanitization, API endpoints (stream, item update, feed list), middleware stack (gzip, security headers, CSRF), and crawler pipeline (feed parsing, mocked crawl). Stress tests verify concurrent reads/writes and large dataset handling. Frontend perf tests measure template generation, DOM insertion, and store event throughput. New Makefile targets: bench, bench-short, stress, test-perf. https://claude.ai/code/session_01ChDVWFDrQoFjMYHpaLGr9s --- Makefile | 14 +- api/api_bench_test.go | 133 +++++++++++++ api/api_stress_test.go | 212 ++++++++++++++++++++ frontend-vanilla/src/perf/renderItems.perf.test.ts | 90 +++++++++ frontend-vanilla/src/perf/store.perf.test.ts | 124 ++++++++++++ internal/crawler/crawler_bench_test.go | 141 +++++++++++++ models/item/item_bench_test.go | 219 +++++++++++++++++++++ web/web_bench_test.go | 92 +++++++++ 8 files changed, 1024 insertions(+), 1 deletion(-) create mode 100644 api/api_bench_test.go create mode 100644 api/api_stress_test.go create mode 100644 frontend-vanilla/src/perf/renderItems.perf.test.ts create mode 100644 frontend-vanilla/src/perf/store.perf.test.ts create mode 100644 internal/crawler/crawler_bench_test.go create mode 100644 models/item/item_bench_test.go create mode 100644 web/web_bench_test.go diff --git a/Makefile b/Makefile index 3e0e7bb..2356f07 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ VERSION=0.3 BUILD=`git rev-parse HEAD` LDFLAGS=-ldflags "-X main.Version=${VERSION} -X main.Build=${BUILD}" -.PHONY: default all clean ui build install test test-race test-frontend test-e2e ui-check lint check ci run dev docs install-hooks cover coverage-html +.PHONY: default all clean ui build install test test-race test-frontend test-e2e ui-check lint check ci run dev docs install-hooks cover 
coverage-html bench bench-short stress test-perf default: build @@ -80,6 +80,18 @@ install-hooks: chmod +x scripts/install-hooks.sh ./scripts/install-hooks.sh +bench: + ${GO} test -bench=. -benchmem -count=3 -run=^$$ ./... + +bench-short: + ${GO} test -bench=. -benchmem -count=1 -run=^$$ ./... + +stress: + ${GO} test -run=TestStress -count=1 -timeout=120s ./... + +test-perf: + cd frontend-vanilla && ${NPM} test -- --run src/perf/ + docs: readme.html readme.html: README.md diff --git a/api/api_bench_test.go b/api/api_bench_test.go new file mode 100644 index 0000000..0018afe --- /dev/null +++ b/api/api_bench_test.go @@ -0,0 +1,133 @@ +package api + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "path/filepath" + "strconv" + "testing" + + "adammathes.com/neko/config" + "adammathes.com/neko/models" + "adammathes.com/neko/models/feed" + "adammathes.com/neko/models/item" +) + +func setupBenchDB(b *testing.B) { + b.Helper() + testMu.Lock() + config.Config.DBFile = filepath.Join(b.TempDir(), "bench.db") + models.InitDB() + b.Cleanup(func() { + if models.DB != nil { + models.DB.Close() + } + testMu.Unlock() + }) +} + +func seedBenchData(b *testing.B, count int) { + b.Helper() + f := &feed.Feed{Url: "http://example.com/bench", Title: "Bench Feed", Category: "tech"} + f.Create() + + for i := 0; i < count; i++ { + it := &item.Item{ + Title: fmt.Sprintf("Bench Item %d", i), + Url: fmt.Sprintf("http://example.com/bench/%d", i), + Description: fmt.Sprintf("
<p>Description for bench item %d with <b>HTML</b></p>
", i), + PublishDate: "2024-01-01 00:00:00", + FeedId: f.Id, + } + _ = it.Create() + } +} + +func BenchmarkHandleStream(b *testing.B) { + setupBenchDB(b) + seedBenchData(b, 15) + server := newTestServer() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("GET", "/stream", nil) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + b.Fatalf("expected 200, got %d", rr.Code) + } + } +} + +func BenchmarkHandleStreamWithSearch(b *testing.B) { + setupBenchDB(b) + seedBenchData(b, 50) + server := newTestServer() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("GET", "/stream?q=Bench", nil) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + b.Fatalf("expected 200, got %d", rr.Code) + } + } +} + +func BenchmarkHandleItemUpdate(b *testing.B) { + setupBenchDB(b) + seedBenchData(b, 1) + server := newTestServer() + + // Get the item ID + items, _ := item.Filter(0, nil, "", false, false, 0, "") + if len(items) == 0 { + b.Fatal("no items seeded") + } + itemID := items[0].Id + + b.ResetTimer() + for i := 0; i < b.N; i++ { + read := i%2 == 0 + body, _ := json.Marshal(item.Item{ + Id: itemID, + ReadState: read, + }) + req := httptest.NewRequest("PUT", "/item/"+strconv.FormatInt(itemID, 10), bytes.NewBuffer(body)) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + b.Fatalf("expected 200, got %d", rr.Code) + } + } +} + +func BenchmarkHandleFeedList(b *testing.B) { + setupBenchDB(b) + + // Create several feeds + for i := 0; i < 10; i++ { + f := &feed.Feed{ + Url: fmt.Sprintf("http://example.com/feed/%d", i), + Title: fmt.Sprintf("Feed %d", i), + Category: "tech", + } + f.Create() + } + + server := newTestServer() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("GET", "/feed", nil) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + 
b.Fatalf("expected 200, got %d", rr.Code) + } + } +} diff --git a/api/api_stress_test.go b/api/api_stress_test.go new file mode 100644 index 0000000..a846f75 --- /dev/null +++ b/api/api_stress_test.go @@ -0,0 +1,212 @@ +package api + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "strconv" + "sync" + "testing" + "time" + + "adammathes.com/neko/models/feed" + "adammathes.com/neko/models/item" +) + +func TestStress_ConcurrentStreamReads(t *testing.T) { + if testing.Short() { + t.Skip("skipping stress test in short mode") + } + + setupTestDB(t) + seedStressData(t, 50) + server := newTestServer() + + const goroutines = 50 + var wg sync.WaitGroup + errors := make(chan error, goroutines) + + start := time.Now() + for i := 0; i < goroutines; i++ { + wg.Add(1) + go func() { + defer wg.Done() + req := httptest.NewRequest("GET", "/stream", nil) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + errors <- fmt.Errorf("got status %d", rr.Code) + return + } + var items []item.Item + if err := json.NewDecoder(rr.Body).Decode(&items); err != nil { + errors <- fmt.Errorf("decode error: %v", err) + } + }() + } + wg.Wait() + close(errors) + elapsed := time.Since(start) + + for err := range errors { + t.Errorf("concurrent stream read error: %v", err) + } + + t.Logf("50 concurrent /stream reads completed in %v", elapsed) + if elapsed > 10*time.Second { + t.Errorf("concurrent reads took too long: %v (threshold: 10s)", elapsed) + } +} + +func TestStress_ConcurrentItemUpdates(t *testing.T) { + if testing.Short() { + t.Skip("skipping stress test in short mode") + } + + setupTestDB(t) + + // Seed 50 items for concurrent updates + f := &feed.Feed{Url: "http://example.com/stress", Title: "Stress Feed"} + f.Create() + + var itemIDs []int64 + for i := 0; i < 50; i++ { + it := &item.Item{ + Title: fmt.Sprintf("Stress Item %d", i), + Url: fmt.Sprintf("http://example.com/stress/%d", i), + Description: "
<p>Stress test item</p>
", + PublishDate: "2024-01-01 00:00:00", + FeedId: f.Id, + } + _ = it.Create() + itemIDs = append(itemIDs, it.Id) + } + + server := newTestServer() + + const goroutines = 50 + var wg sync.WaitGroup + errors := make(chan error, goroutines) + + start := time.Now() + for i := 0; i < goroutines; i++ { + wg.Add(1) + go func(idx int) { + defer wg.Done() + id := itemIDs[idx] + body, _ := json.Marshal(item.Item{ + Id: id, + ReadState: true, + Starred: idx%2 == 0, + }) + req := httptest.NewRequest("PUT", "/item/"+strconv.FormatInt(id, 10), bytes.NewBuffer(body)) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + errors <- fmt.Errorf("item %d update got status %d", id, rr.Code) + } + }(i) + } + wg.Wait() + close(errors) + elapsed := time.Since(start) + + for err := range errors { + t.Errorf("concurrent item update error: %v", err) + } + + t.Logf("50 concurrent item updates completed in %v", elapsed) + if elapsed > 10*time.Second { + t.Errorf("concurrent updates took too long: %v (threshold: 10s)", elapsed) + } +} + +func TestStress_LargeDataset(t *testing.T) { + if testing.Short() { + t.Skip("skipping stress test in short mode") + } + + setupTestDB(t) + seedStressData(t, 1000) + server := newTestServer() + + // Test basic filter on large dataset + start := time.Now() + req := httptest.NewRequest("GET", "/stream", nil) + rr := httptest.NewRecorder() + server.ServeHTTP(rr, req) + elapsed := time.Since(start) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d", rr.Code) + } + + var items []item.Item + if err := json.NewDecoder(rr.Body).Decode(&items); err != nil { + t.Fatalf("decode error: %v", err) + } + + if len(items) != 15 { + t.Errorf("expected 15 items (LIMIT), got %d", len(items)) + } + + t.Logf("filter on 1000 items completed in %v", elapsed) + if elapsed > 2*time.Second { + t.Errorf("large dataset filter took too long: %v (threshold: 2s)", elapsed) + } + + // Test pagination + start = time.Now() + lastID := 
items[len(items)-1].Id + req = httptest.NewRequest("GET", fmt.Sprintf("/stream?max_id=%d", lastID), nil) + rr = httptest.NewRecorder() + server.ServeHTTP(rr, req) + elapsed = time.Since(start) + + if rr.Code != http.StatusOK { + t.Fatalf("pagination: expected 200, got %d", rr.Code) + } + + var page2 []item.Item + json.NewDecoder(rr.Body).Decode(&page2) + if len(page2) != 15 { + t.Errorf("pagination: expected 15 items, got %d", len(page2)) + } + + t.Logf("paginated filter completed in %v", elapsed) + + // Test FTS on large dataset + start = time.Now() + req = httptest.NewRequest("GET", "/stream?q=Bench", nil) + rr = httptest.NewRecorder() + server.ServeHTTP(rr, req) + elapsed = time.Since(start) + + if rr.Code != http.StatusOK { + t.Fatalf("FTS: expected 200, got %d", rr.Code) + } + + t.Logf("FTS on 1000 items completed in %v", elapsed) + if elapsed > 2*time.Second { + t.Errorf("FTS on large dataset took too long: %v (threshold: 2s)", elapsed) + } +} + +func seedStressData(t *testing.T, count int) { + t.Helper() + f := &feed.Feed{Url: "http://example.com/bench", Title: "Bench Feed", Category: "tech"} + f.Create() + + for i := 0; i < count; i++ { + it := &item.Item{ + Title: fmt.Sprintf("Bench Item %d", i), + Url: fmt.Sprintf("http://example.com/bench/%d", i), + Description: fmt.Sprintf("
<p>Description for bench item %d with <b>HTML</b></p>
", i), + PublishDate: "2024-01-01 00:00:00", + FeedId: f.Id, + } + _ = it.Create() + } +} diff --git a/frontend-vanilla/src/perf/renderItems.perf.test.ts b/frontend-vanilla/src/perf/renderItems.perf.test.ts new file mode 100644 index 0000000..3dd2ed7 --- /dev/null +++ b/frontend-vanilla/src/perf/renderItems.perf.test.ts @@ -0,0 +1,90 @@ +import { describe, it, expect } from 'vitest'; +import { createFeedItem } from '../components/FeedItem'; +import type { Item } from '../types'; + +function makeItem(id: number): Item { + return { + _id: id, + feed_id: 1, + title: `Test Item ${id}`, + url: `https://example.com/item/${id}`, + description: `
<p>Description for item ${id} with <b>bold</b> and <a href="https://example.com">link</a></p>
`, + publish_date: '2024-01-01T00:00:00Z', + read: id % 3 === 0, + starred: id % 5 === 0, + feed_title: 'Test Feed', + }; +} + +describe('renderItems performance', () => { + it('createFeedItem renders 100 items under 50ms', () => { + const items = Array.from({ length: 100 }, (_, i) => makeItem(i)); + + const start = performance.now(); + const html = items.map(item => createFeedItem(item)).join(''); + const elapsed = performance.now() - start; + + expect(html).toBeTruthy(); + expect(html).toContain('feed-item'); + expect(elapsed).toBeLessThan(50); + }); + + it('createFeedItem renders 500 items under 200ms', () => { + const items = Array.from({ length: 500 }, (_, i) => makeItem(i)); + + const start = performance.now(); + const html = items.map(item => createFeedItem(item)).join(''); + const elapsed = performance.now() - start; + + expect(html).toBeTruthy(); + expect(elapsed).toBeLessThan(200); + }); + + it('createFeedItem renders 1000 items under 100ms', () => { + const items = Array.from({ length: 1000 }, (_, i) => makeItem(i)); + + const start = performance.now(); + const results: string[] = []; + for (const item of items) { + results.push(createFeedItem(item)); + } + const elapsed = performance.now() - start; + + expect(results.length).toBe(1000); + expect(elapsed).toBeLessThan(100); + }); + + it('DOM insertion of 100 items under 200ms', () => { + const items = Array.from({ length: 100 }, (_, i) => makeItem(i)); + const html = items.map(item => createFeedItem(item)).join(''); + + const container = document.createElement('ul'); + document.body.appendChild(container); + + const start = performance.now(); + container.innerHTML = html; + const elapsed = performance.now() - start; + + expect(container.children.length).toBe(100); + expect(elapsed).toBeLessThan(200); + + document.body.removeChild(container); + }); + + it('DOM insertion of 500 items under 500ms', () => { + const items = Array.from({ length: 500 }, (_, i) => makeItem(i)); + const html = items.map(item => 
createFeedItem(item)).join(''); + + const container = document.createElement('ul'); + document.body.appendChild(container); + + const start = performance.now(); + container.innerHTML = html; + const elapsed = performance.now() - start; + + expect(container.children.length).toBe(500); + expect(elapsed).toBeLessThan(500); + + document.body.removeChild(container); + }); +}); diff --git a/frontend-vanilla/src/perf/store.perf.test.ts b/frontend-vanilla/src/perf/store.perf.test.ts new file mode 100644 index 0000000..734e132 --- /dev/null +++ b/frontend-vanilla/src/perf/store.perf.test.ts @@ -0,0 +1,124 @@ +import { describe, it, expect } from 'vitest'; +import { Store } from '../store'; +import type { Item, Feed, Category } from '../types'; + +function makeItem(id: number): Item { + return { + _id: id, + feed_id: 1, + title: `Test Item ${id}`, + url: `https://example.com/item/${id}`, + description: `Description for item ${id}`, + publish_date: '2024-01-01T00:00:00Z', + read: false, + starred: false, + feed_title: 'Test Feed', + }; +} + +function makeFeed(id: number): Feed { + return { + _id: id, + url: `https://example.com/feed/${id}`, + web_url: `https://example.com/${id}`, + title: `Feed ${id}`, + category: `cat-${id % 5}`, + }; +} + +describe('store performance', () => { + it('setItems with 500 items + event dispatch under 10ms', () => { + const store = new Store(); + const items = Array.from({ length: 500 }, (_, i) => makeItem(i)); + + let eventFired = false; + store.on('items-updated', () => { eventFired = true; }); + + const start = performance.now(); + store.setItems(items); + const elapsed = performance.now() - start; + + expect(store.items.length).toBe(500); + expect(eventFired).toBe(true); + expect(elapsed).toBeLessThan(10); + }); + + it('setItems append 500 items to existing 500 under 10ms', () => { + const store = new Store(); + const initial = Array.from({ length: 500 }, (_, i) => makeItem(i)); + const more = Array.from({ length: 500 }, (_, i) => makeItem(i 
+ 500)); + + store.setItems(initial); + + const start = performance.now(); + store.setItems(more, true); + const elapsed = performance.now() - start; + + expect(store.items.length).toBe(1000); + expect(elapsed).toBeLessThan(10); + }); + + it('setFeeds with 200 feeds under 5ms', () => { + const store = new Store(); + const feeds = Array.from({ length: 200 }, (_, i) => makeFeed(i)); + + let eventFired = false; + store.on('feeds-updated', () => { eventFired = true; }); + + const start = performance.now(); + store.setFeeds(feeds); + const elapsed = performance.now() - start; + + expect(store.feeds.length).toBe(200); + expect(eventFired).toBe(true); + expect(elapsed).toBeLessThan(5); + }); + + it('rapid filter changes (100 toggles) under 50ms', () => { + const store = new Store(); + const filters: Array<'unread' | 'all' | 'starred'> = ['unread', 'all', 'starred']; + let eventCount = 0; + store.on('filter-updated', () => { eventCount++; }); + + const start = performance.now(); + for (let i = 0; i < 100; i++) { + store.setFilter(filters[i % 3]); + } + const elapsed = performance.now() - start; + + expect(eventCount).toBeGreaterThan(0); + expect(elapsed).toBeLessThan(50); + }); + + it('rapid search query changes (100 updates) under 50ms', () => { + const store = new Store(); + let eventCount = 0; + store.on('search-updated', () => { eventCount++; }); + + const start = performance.now(); + for (let i = 0; i < 100; i++) { + store.setSearchQuery(`query-${i}`); + } + const elapsed = performance.now() - start; + + expect(eventCount).toBe(100); + expect(elapsed).toBeLessThan(50); + }); + + it('multiple listeners (50) on items-updated under 10ms', () => { + const store = new Store(); + const items = Array.from({ length: 100 }, (_, i) => makeItem(i)); + let totalCalls = 0; + + for (let i = 0; i < 50; i++) { + store.on('items-updated', () => { totalCalls++; }); + } + + const start = performance.now(); + store.setItems(items); + const elapsed = performance.now() - start; + + 
expect(totalCalls).toBe(50); + expect(elapsed).toBeLessThan(10); + }); +}); diff --git a/internal/crawler/crawler_bench_test.go b/internal/crawler/crawler_bench_test.go new file mode 100644 index 0000000..90d92cc --- /dev/null +++ b/internal/crawler/crawler_bench_test.go @@ -0,0 +1,141 @@ +package crawler + +import ( + "net/http" + "net/http/httptest" + "testing" + + "adammathes.com/neko/config" + "adammathes.com/neko/internal/safehttp" + "adammathes.com/neko/models" + "adammathes.com/neko/models/feed" + + "github.com/mmcdole/gofeed" +) + +const testRSSFeed = ` + + + Bench Test Feed + https://example.com + A feed for benchmarking + + Article One + https://example.com/1 + <p>First article with <b>bold</b> and <a href="https://example.com">link</a></p> + Mon, 01 Jan 2024 00:00:00 +0000 + + + Article Two + https://example.com/2 + <p>Second article with some content</p> + Tue, 02 Jan 2024 00:00:00 +0000 + + + Article Three + https://example.com/3 + <p>Third article</p> + Wed, 03 Jan 2024 00:00:00 +0000 + + + Article Four + https://example.com/4 + <p>Fourth article with <img src="https://example.com/img.jpg"></p> + Thu, 04 Jan 2024 00:00:00 +0000 + + + Article Five + https://example.com/5 + <p>Fifth article</p> + Fri, 05 Jan 2024 00:00:00 +0000 + + + Article Six + https://example.com/6 + <p>Sixth article</p> + Sat, 06 Jan 2024 00:00:00 +0000 + + + Article Seven + https://example.com/7 + <p>Seventh article</p> + Sun, 07 Jan 2024 00:00:00 +0000 + + + Article Eight + https://example.com/8 + <p>Eighth article</p> + Mon, 08 Jan 2024 00:00:00 +0000 + + + Article Nine + https://example.com/9 + <p>Ninth article with longer content to simulate a real feed item that has more text in it</p> + Tue, 09 Jan 2024 00:00:00 +0000 + + + Article Ten + https://example.com/10 + <p>Tenth article</p> + Wed, 10 Jan 2024 00:00:00 +0000 + + +` + +func BenchmarkParseFeed(b *testing.B) { + fp := gofeed.NewParser() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := 
fp.ParseString(testRSSFeed) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkCrawlFeedMocked(b *testing.B) { + safehttp.AllowLocal = true + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/rss+xml") + w.WriteHeader(200) + w.Write([]byte(testRSSFeed)) + })) + defer ts.Close() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + // Each iteration needs a fresh DB since CrawlFeed inserts items + config.Config.DBFile = ":memory:" + models.InitDB() + + f := &feed.Feed{Url: ts.URL, Title: "Bench Feed"} + f.Create() + + ch := make(chan string, 1) + CrawlFeed(f, ch) + <-ch + + models.DB.Close() + } +} + +func BenchmarkGetFeedContent(b *testing.B) { + safehttp.AllowLocal = true + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/rss+xml") + w.WriteHeader(200) + w.Write([]byte(testRSSFeed)) + })) + defer ts.Close() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + content := GetFeedContent(ts.URL) + if content == "" { + b.Fatal("empty content") + } + } +} diff --git a/models/item/item_bench_test.go b/models/item/item_bench_test.go new file mode 100644 index 0000000..5e66f2d --- /dev/null +++ b/models/item/item_bench_test.go @@ -0,0 +1,219 @@ +package item + +import ( + "fmt" + "path/filepath" + "strings" + "testing" + + "adammathes.com/neko/config" + "adammathes.com/neko/models" +) + +func setupBenchDB(b *testing.B) { + b.Helper() + config.Config.DBFile = filepath.Join(b.TempDir(), "bench.db") + models.InitDB() + b.Cleanup(func() { + if models.DB != nil { + models.DB.Close() + } + }) +} + +func createBenchFeed(b *testing.B) int64 { + b.Helper() + res, err := models.DB.Exec("INSERT INTO feed(url, title, category) VALUES(?, ?, ?)", + "https://example.com/feed", "Bench Feed", "tech") + if err != nil { + b.Fatal(err) + } + id, _ := res.LastInsertId() + return id +} + +func seedBenchItems(b 
*testing.B, feedID int64, count int) { + b.Helper() + for i := 0; i < count; i++ { + _, err := models.DB.Exec( + `INSERT INTO item(title, url, description, publish_date, feed_id, read_state, starred) + VALUES(?, ?, ?, datetime('now'), ?, 0, 0)`, + fmt.Sprintf("Bench Item %d", i), + fmt.Sprintf("https://example.com/item/%d", i), + fmt.Sprintf("
<p>Description for item %d with <b>bold</b> and <a href="https://example.com">link</a></p>
", i), + feedID, + ) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkItemCreate(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + item := &Item{ + Title: fmt.Sprintf("Item %d", i), + Url: fmt.Sprintf("https://example.com/bench/%d", i), + Description: "
<p>Benchmark item description</p>
", + PublishDate: "2024-01-01 00:00:00", + FeedId: feedID, + } + _ = item.Create() + } +} + +func BenchmarkItemCreateBatch100(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + for j := 0; j < 100; j++ { + item := &Item{ + Title: fmt.Sprintf("Batch %d Item %d", i, j), + Url: fmt.Sprintf("https://example.com/batch/%d/%d", i, j), + Description: "
<p>Batch item description</p>
", + PublishDate: "2024-01-01 00:00:00", + FeedId: feedID, + } + _ = item.Create() + } + } +} + +func BenchmarkFilter_Empty(b *testing.B) { + setupBenchDB(b) + _ = createBenchFeed(b) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Filter(0, nil, "", false, false, 0, "") + } +} + +func BenchmarkFilter_15Items(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + seedBenchItems(b, feedID, 15) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Filter(0, nil, "", false, false, 0, "") + } +} + +func BenchmarkFilter_WithFTS(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + seedBenchItems(b, feedID, 50) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Filter(0, nil, "", false, false, 0, "Bench") + } +} + +func BenchmarkFilter_WithImageProxy(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + + // Seed items with image-heavy descriptions + for i := 0; i < 15; i++ { + _, err := models.DB.Exec( + `INSERT INTO item(title, url, description, publish_date, feed_id, read_state, starred) + VALUES(?, ?, ?, datetime('now'), ?, 0, 0)`, + fmt.Sprintf("Image Item %d", i), + fmt.Sprintf("https://example.com/img/%d", i), + `
<p>Text with images <img src="https://example.com/a.jpg">a and <img src="https://example.com/b.jpg">b</p>
`, + feedID, + ) + if err != nil { + b.Fatal(err) + } + } + + config.Config.ProxyImages = true + b.Cleanup(func() { config.Config.ProxyImages = false }) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Filter(0, nil, "", false, false, 0, "") + } +} + +func BenchmarkFilterPolicy(b *testing.B) { + html := `
<p>Hello <b>world</b> with <a href="https://example.com">link</a> and <script>test</script> and <img src="https://example.com/img.jpg"></p>
` + + b.ResetTimer() + for i := 0; i < b.N; i++ { + p := filterPolicy() + _ = p.Sanitize(html) + } +} + +func BenchmarkRewriteImages(b *testing.B) { + html := `
<p>Text <img src="https://example.com/1.jpg">1 more text <img src="https://example.com/2.jpg">2 <img src="https://example.com/3.jpg">3 <img src="https://example.com/4.jpg">4 <img src="https://example.com/5.jpg">5</p>
` + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = rewriteImages(html) + } +} + +func BenchmarkItemSave(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + + item := &Item{ + Title: "Save Bench Item", + Url: "https://example.com/save-bench", + Description: "
<p>Item to update</p>
", + PublishDate: "2024-01-01 00:00:00", + FeedId: feedID, + } + if err := item.Create(); err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + item.ReadState = !item.ReadState + item.Save() + } +} + +func BenchmarkFilter_LargeDataset(b *testing.B) { + setupBenchDB(b) + feedID := createBenchFeed(b) + + // Bulk insert 500 items for a realistic dataset + var sb strings.Builder + for i := 0; i < 500; i++ { + if i > 0 { + sb.WriteString(",") + } + sb.WriteString(fmt.Sprintf( + "('Item %d', 'https://example.com/large/%d', '
<p>Description %d</p>
', datetime('now'), %d, 0, 0)", + i, i, i, feedID, + )) + } + _, err := models.DB.Exec( + "INSERT INTO item(title, url, description, publish_date, feed_id, read_state, starred) VALUES " + sb.String(), + ) + if err != nil { + b.Fatal(err) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Filter(0, nil, "", false, false, 0, "") + } +} diff --git a/web/web_bench_test.go b/web/web_bench_test.go new file mode 100644 index 0000000..7897fc7 --- /dev/null +++ b/web/web_bench_test.go @@ -0,0 +1,92 @@ +package web + +import ( + "net/http" + "net/http/httptest" + "strings" + "testing" + + "adammathes.com/neko/config" +) + +func BenchmarkGzipMiddleware(b *testing.B) { + // Simulate a JSON API response + jsonPayload := `[` + strings.Repeat(`{"_id":"1","title":"Test Item","url":"https://example.com","description":"
<p>This is a test description with enough content to be worth compressing in a real scenario</p>
","read":false,"starred":false},`, 14) + + `{"_id":"15","title":"Last Item","url":"https://example.com/15","description":"
<p>Final item</p>
","read":false,"starred":false}]` + + handler := GzipMiddleware(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(jsonPayload)) + })) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("GET", "/api/stream", nil) + req.Header.Set("Accept-Encoding", "gzip") + rr := httptest.NewRecorder() + handler.ServeHTTP(rr, req) + } +} + +func BenchmarkSecurityHeaders(b *testing.B) { + handler := SecurityHeadersMiddleware(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + })) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("GET", "/", nil) + rr := httptest.NewRecorder() + handler.ServeHTTP(rr, req) + } +} + +func BenchmarkCSRFMiddleware(b *testing.B) { + cfg := &config.Settings{SecureCookies: false} + handler := CSRFMiddleware(cfg, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + })) + + // Pre-generate a CSRF token by doing an initial GET + initReq := httptest.NewRequest("GET", "/", nil) + initRR := httptest.NewRecorder() + handler.ServeHTTP(initRR, initReq) + + var csrfCookie *http.Cookie + for _, c := range initRR.Result().Cookies() { + if c.Name == "csrf_token" { + csrfCookie = c + break + } + } + if csrfCookie == nil { + b.Fatal("no csrf cookie set") + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("POST", "/api/stream", nil) + req.AddCookie(csrfCookie) + req.Header.Set("X-CSRF-Token", csrfCookie.Value) + rr := httptest.NewRecorder() + handler.ServeHTTP(rr, req) + } +} + +func BenchmarkFullMiddlewareStack(b *testing.B) { + cfg := &config.Settings{SecureCookies: false} + inner := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{"status":"ok"}`)) + }) + + handler := SecurityHeadersMiddleware(CSRFMiddleware(cfg, 
GzipMiddleware(inner))) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + req := httptest.NewRequest("GET", "/", nil) + req.Header.Set("Accept-Encoding", "gzip") + rr := httptest.NewRecorder() + handler.ServeHTTP(rr, req) + } +} -- cgit v1.2.3