diff options
| author | google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> | 2026-02-18 03:19:31 +0000 |
|---|---|---|
| committer | google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> | 2026-02-18 03:19:31 +0000 |
| commit | 6f199f2eec236211f2e9d1e320fd536f459f042a (patch) | |
| tree | 5e141f35966ef933bf91ff6b99f1927b375fc81a /internal/crawler/crawler.go | |
| parent | 9db36ae402dbb74f7223a4efc8b2483086684e38 (diff) | |
| download | neko-6f199f2eec236211f2e9d1e320fd536f459f042a.tar.gz neko-6f199f2eec236211f2e9d1e320fd536f459f042a.tar.bz2 neko-6f199f2eec236211f2e9d1e320fd536f459f042a.zip | |
Fix unbounded memory usage in crawler (DoS)
Co-authored-by: adammathes <868470+adammathes@users.noreply.github.com>
Diffstat (limited to 'internal/crawler/crawler.go')
| -rw-r--r-- | internal/crawler/crawler.go | 3 |
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/internal/crawler/crawler.go b/internal/crawler/crawler.go index 4f5de98..e664e06 100644 --- a/internal/crawler/crawler.go +++ b/internal/crawler/crawler.go @@ -15,6 +15,7 @@ import ( ) const MAX_CRAWLERS = 5 +const MAX_FEED_SIZE = 10 * 1024 * 1024 // 10MB func Crawl() { crawlJobs := make(chan *feed.Feed, 100) @@ -88,7 +89,7 @@ func GetFeedContent(feedURL string) string { return "" } - bodyBytes, err := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, MAX_FEED_SIZE)) if err != nil { return "" } |
