diff options
| author | google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> | 2026-02-18 03:19:31 +0000 |
|---|---|---|
| committer | google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> | 2026-02-18 03:19:31 +0000 |
| commit | 6f199f2eec236211f2e9d1e320fd536f459f042a (patch) | |
| tree | 5e141f35966ef933bf91ff6b99f1927b375fc81a /internal/crawler/security_test.go | |
| parent | 9db36ae402dbb74f7223a4efc8b2483086684e38 (diff) | |
| download | neko-6f199f2eec236211f2e9d1e320fd536f459f042a.tar.gz neko-6f199f2eec236211f2e9d1e320fd536f459f042a.tar.bz2 neko-6f199f2eec236211f2e9d1e320fd536f459f042a.zip | |
Fix unbounded memory usage in crawler (DoS)
Co-authored-by: adammathes <868470+adammathes@users.noreply.github.com>
Diffstat (limited to 'internal/crawler/security_test.go')
| -rw-r--r-- | internal/crawler/security_test.go | 34 |
1 file changed, 34 insertions, 0 deletions
diff --git a/internal/crawler/security_test.go b/internal/crawler/security_test.go new file mode 100644 index 0000000..198f7ee --- /dev/null +++ b/internal/crawler/security_test.go @@ -0,0 +1,34 @@ +package crawler + +import ( + "net/http" + "net/http/httptest" + "strings" + "testing" + + "adammathes.com/neko/internal/safehttp" +) + +func init() { + safehttp.AllowLocal = true +} + +func TestGetFeedContentLimit(t *testing.T) { + // 10MB limit expected + limit := 10 * 1024 * 1024 + // 11MB payload + size := limit + 1024*1024 + largeBody := strings.Repeat("a", size) + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(200) + w.Write([]byte(largeBody)) + })) + defer ts.Close() + + content := GetFeedContent(ts.URL) + + if len(content) != limit { + t.Errorf("Expected content length %d, got %d", limit, len(content)) + } +} |
