From 4c570c223e944a148dc81a4e1ee25d6ab524ae64 Mon Sep 17 00:00:00 2001 From: Adam Mathes Date: Sun, 15 Feb 2026 11:13:44 -0800 Subject: Document background crawler behavior and update default crawl minutes to 60 (or disable with 0) --- README.md | 38 +++++++++++++++++++++++++++++++++----- cmd/neko/main.go | 6 +++--- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 5244803..759a52e 100644 --- a/README.md +++ b/README.md @@ -191,11 +191,23 @@ You can add feeds directly from the command line for convenience -- ## Crawl Feeds -Update feeds from the command line with -- +By default, when running the web server (`neko`), a background crawler runs every **60 minutes** to fetch new items. + +You can customize this interval using the `--minutes` flag: + + $ neko --minutes=30 # Crawl every 30 minutes + +To **disable** background crawling, set minutes to 0: + + $ neko --minutes=0 # Run web server only, no background crawling + +### Manual Update + +You can manually trigger a feed update from the command line without starting the server: $ neko --update -This will fetch, download, parse, and store in the database your feeds. +This will fetch, download, parse, and store your feeds in the database once, then exit. ## Export @@ -209,6 +221,18 @@ Export is also available in the web interface. Import of OPML and other things is available via the web interface. +## Purge Items + +You can delete old items to free up database space. By default, only **read** items are deleted. + + $ neko --purge=30 # Delete read items older than 30 days + +To include **unread** items in the purge: + + $ neko --purge=30 --purge-unread + +**Note:** Starred items are never deleted. 
+ # All Command Line Options View all command line options with `-h` or `--help` @@ -227,14 +251,18 @@ Usage of neko: -h, --help print usage information -s, --http int - HTTP port to serve on + HTTP port to serve on -i, --imageproxy rewrite and proxy all image requests for privacy (experimental) -m, --minutes int - minutes between crawling feeds + minutes between crawling feeds (default -1, uses 60 if unset) -p, --password string password to access web interface - --secure-cookies + --purge int + purge read items older than N days + --purge-unread + when purging, also include unread items + --secure-cookies set Secure flag on cookies (requires HTTPS) -u, --update fetch feeds and store new items diff --git a/cmd/neko/main.go b/cmd/neko/main.go index cc724df..68bc971 100644 --- a/cmd/neko/main.go +++ b/cmd/neko/main.go @@ -63,8 +63,8 @@ func Run(args []string) error { f.IntVar(&port, "http", 0, "HTTP port to serve on") f.IntVar(&port, "s", 0, "HTTP port to serve on (short)") - f.IntVar(&minutes, "minutes", 0, "minutes between crawling feeds") - f.IntVar(&minutes, "m", 0, "minutes between crawling feeds (short)") + f.IntVar(&minutes, "minutes", -1, "minutes between crawling feeds") + f.IntVar(&minutes, "m", -1, "minutes between crawling feeds (short)") f.BoolVar(&proxyImages, "imageproxy", false, "rewrite and proxy all image requests") f.BoolVar(&proxyImages, "i", false, "rewrite and proxy all image requests (short)") @@ -117,7 +117,7 @@ func Run(args []string) error { config.Config.DigestPassword = password } - if minutes != 0 { + if minutes != -1 { config.Config.CrawlMinutes = minutes } -- cgit v1.2.3