diff options
| author | Claude <noreply@anthropic.com> | 2026-02-16 18:15:47 +0000 |
|---|---|---|
| committer | Claude <noreply@anthropic.com> | 2026-02-16 18:15:47 +0000 |
| commit | d4caf45b2b9ea6a3276de792cf6f73085e66b1ae (patch) | |
| tree | 3a8b7b1d9277ba0db4c946fea43043d65220fd8d /internal/crawler/crawler_bench_test.go | |
| parent | c9c5def76c3a3340373143f846454b795d296c82 (diff) | |
| download | neko-d4caf45b2b9ea6a3276de792cf6f73085e66b1ae.tar.gz neko-d4caf45b2b9ea6a3276de792cf6f73085e66b1ae.tar.bz2 neko-d4caf45b2b9ea6a3276de792cf6f73085e66b1ae.zip | |
Add performance benchmarks, stress tests, and frontend perf tests
Go benchmarks cover item CRUD/filter/sanitization, API endpoints (stream,
item update, feed list), middleware stack (gzip, security headers, CSRF),
and crawler pipeline (feed parsing, mocked crawl). Stress tests verify
concurrent reads/writes and large dataset handling. Frontend perf tests
measure template generation, DOM insertion, and store event throughput.
New Makefile targets: bench, bench-short, stress, test-perf.
https://claude.ai/code/session_01ChDVWFDrQoFjMYHpaLGr9s
Diffstat (limited to 'internal/crawler/crawler_bench_test.go')
| -rw-r--r-- | internal/crawler/crawler_bench_test.go | 141 |
1 file changed, 141 insertions, 0 deletions
diff --git a/internal/crawler/crawler_bench_test.go b/internal/crawler/crawler_bench_test.go new file mode 100644 index 0000000..90d92cc --- /dev/null +++ b/internal/crawler/crawler_bench_test.go @@ -0,0 +1,141 @@ +package crawler + +import ( + "net/http" + "net/http/httptest" + "testing" + + "adammathes.com/neko/config" + "adammathes.com/neko/internal/safehttp" + "adammathes.com/neko/models" + "adammathes.com/neko/models/feed" + + "github.com/mmcdole/gofeed" +) + +const testRSSFeed = `<?xml version="1.0" encoding="UTF-8"?> +<rss version="2.0"> + <channel> + <title>Bench Test Feed</title> + <link>https://example.com</link> + <description>A feed for benchmarking</description> + <item> + <title>Article One</title> + <link>https://example.com/1</link> + <description><p>First article with <b>bold</b> and <a href="https://example.com">link</a></p></description> + <pubDate>Mon, 01 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Two</title> + <link>https://example.com/2</link> + <description><p>Second article with some content</p></description> + <pubDate>Tue, 02 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Three</title> + <link>https://example.com/3</link> + <description><p>Third article</p></description> + <pubDate>Wed, 03 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Four</title> + <link>https://example.com/4</link> + <description><p>Fourth article with <img src="https://example.com/img.jpg"></p></description> + <pubDate>Thu, 04 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Five</title> + <link>https://example.com/5</link> + <description><p>Fifth article</p></description> + <pubDate>Fri, 05 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Six</title> + <link>https://example.com/6</link> + <description><p>Sixth article</p></description> + <pubDate>Sat, 06 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Seven</title> + 
<link>https://example.com/7</link> + <description><p>Seventh article</p></description> + <pubDate>Sun, 07 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Eight</title> + <link>https://example.com/8</link> + <description><p>Eighth article</p></description> + <pubDate>Mon, 08 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Nine</title> + <link>https://example.com/9</link> + <description><p>Ninth article with longer content to simulate a real feed item that has more text in it</p></description> + <pubDate>Tue, 09 Jan 2024 00:00:00 +0000</pubDate> + </item> + <item> + <title>Article Ten</title> + <link>https://example.com/10</link> + <description><p>Tenth article</p></description> + <pubDate>Wed, 10 Jan 2024 00:00:00 +0000</pubDate> + </item> + </channel> +</rss>` + +func BenchmarkParseFeed(b *testing.B) { + fp := gofeed.NewParser() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := fp.ParseString(testRSSFeed) + if err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkCrawlFeedMocked(b *testing.B) { + safehttp.AllowLocal = true + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/rss+xml") + w.WriteHeader(200) + w.Write([]byte(testRSSFeed)) + })) + defer ts.Close() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + // Each iteration needs a fresh DB since CrawlFeed inserts items + config.Config.DBFile = ":memory:" + models.InitDB() + + f := &feed.Feed{Url: ts.URL, Title: "Bench Feed"} + f.Create() + + ch := make(chan string, 1) + CrawlFeed(f, ch) + <-ch + + models.DB.Close() + } +} + +func BenchmarkGetFeedContent(b *testing.B) { + safehttp.AllowLocal = true + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/rss+xml") + w.WriteHeader(200) + w.Write([]byte(testRSSFeed)) + })) + defer ts.Close() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + 
content := GetFeedContent(ts.URL) + if content == "" { + b.Fatal("empty content") + } + } +} |
