about summary refs log tree commit diff stats
path: root/api
diff options
context:
space:
mode:
authorAdam Mathes <adam@adammathes.com>2026-02-12 20:26:47 -0800
committerAdam Mathes <adam@adammathes.com>2026-02-12 20:26:47 -0800
commit281e3ec0a8cfcf06d8e358e5d397a828b9a6f456 (patch)
treebd4acfec418d4143699d0ac4db572af29003135e /api
parent16186a344a7b61633cb7342aac37ac56ad83d261 (diff)
downloadneko-281e3ec0a8cfcf06d8e358e5d397a828b9a6f456.tar.gz
neko-281e3ec0a8cfcf06d8e358e5d397a828b9a6f456.tar.bz2
neko-281e3ec0a8cfcf06d8e358e5d397a828b9a6f456.zip
Refactor backend to a clean REST API
- Created new 'api' package with testable router and RESTful handlers
- Handlers in 'api' use proper HTTP methods and status codes
- Standardized JSON responses and error handling
- Refactored 'web' package to delegate logic to 'api'
- Maintained backward compatibility for legacy frontend routes
- Simplified 'web/web_test.go' and added comprehensive 'api/api_test.go'
- All tests passing with improved modularity
Diffstat (limited to 'api')
-rw-r--r--api/api.go207
-rw-r--r--api/api_test.go151
2 files changed, 358 insertions, 0 deletions
diff --git a/api/api.go b/api/api.go
new file mode 100644
index 0000000..b9e63ba
--- /dev/null
+++ b/api/api.go
@@ -0,0 +1,207 @@
+package api
+
+import (
+ "encoding/json"
+ "log"
+ "net/http"
+ "strconv"
+ "strings"
+
+ "adammathes.com/neko/crawler"
+ "adammathes.com/neko/exporter"
+ "adammathes.com/neko/models/feed"
+ "adammathes.com/neko/models/item"
+)
+
+// NewRouter returns a configured mux with all API routes.
+func NewRouter() *http.ServeMux {
+ mux := http.NewServeMux()
+ mux.HandleFunc("/stream", HandleStream)
+ mux.HandleFunc("/item/", HandleItem)
+ mux.HandleFunc("/feed", HandleFeed)
+ mux.HandleFunc("/feed/", HandleFeed)
+ mux.HandleFunc("/tag", HandleCategory)
+ mux.HandleFunc("/export/", HandleExport)
+ mux.HandleFunc("/crawl", HandleCrawl)
+ return mux
+}
+
+func jsonError(w http.ResponseWriter, msg string, code int) {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(code)
+ json.NewEncoder(w).Encode(map[string]string{"error": msg})
+}
+
+func jsonResponse(w http.ResponseWriter, data interface{}) {
+ w.Header().Set("Content-Type", "application/json")
+ json.NewEncoder(w).Encode(data)
+}
+
+func HandleStream(w http.ResponseWriter, r *http.Request) {
+ if r.Method != http.MethodGet {
+ jsonError(w, "method not allowed", http.StatusMethodNotAllowed)
+ return
+ }
+
+ maxID, _ := strconv.ParseInt(r.FormValue("max_id"), 10, 64)
+ feedID, _ := strconv.ParseInt(r.FormValue("feed_id"), 10, 64)
+
+ // Backward compatibility with feed_url if feed_id is not provided
+ if feedID == 0 && r.FormValue("feed_url") != "" {
+ var f feed.Feed
+ f.ByUrl(r.FormValue("feed_url"))
+ feedID = f.Id
+ }
+
+ category := r.FormValue("tag")
+ unreadOnly := r.FormValue("read_filter") != "all"
+ starredOnly := r.FormValue("starred") == "1" || r.FormValue("starred") == "true"
+ searchQuery := r.FormValue("q")
+
+ if searchQuery != "" {
+ unreadOnly = false
+ }
+
+ items, err := item.Filter(maxID, feedID, category, unreadOnly, starredOnly, 0, searchQuery)
+ if err != nil {
+ log.Println(err)
+ jsonError(w, "failed to filter items", http.StatusInternalServerError)
+ return
+ }
+
+ jsonResponse(w, items)
+}
+
+func HandleItem(w http.ResponseWriter, r *http.Request) {
+ idStr := strings.TrimPrefix(r.URL.Path, "/item/")
+ id, _ := strconv.ParseInt(idStr, 10, 64)
+
+ if id == 0 {
+ jsonError(w, "invalid item id", http.StatusBadRequest)
+ return
+ }
+
+ switch r.Method {
+ case http.MethodPut:
+ var i item.Item
+ if err := json.NewDecoder(r.Body).Decode(&i); err != nil {
+ jsonError(w, "invalid json", http.StatusBadRequest)
+ return
+ }
+ i.Id = id
+ i.Save()
+ jsonResponse(w, i)
+
+ case http.MethodPost, http.MethodGet:
+ // Full text extraction - supporting GET for backward compatibility
+ i := item.ItemById(id)
+ if i == nil {
+ jsonError(w, "item not found", http.StatusNotFound)
+ return
+ }
+ if i.FullContent == "" {
+ i.GetFullContent()
+ }
+ jsonResponse(w, i)
+
+ default:
+ jsonError(w, "method not allowed", http.StatusMethodNotAllowed)
+ }
+}
+
+func HandleFeed(w http.ResponseWriter, r *http.Request) {
+ switch r.Method {
+ case http.MethodGet:
+ feeds, err := feed.All()
+ if err != nil {
+ log.Println(err)
+ jsonError(w, "failed to fetch feeds", http.StatusInternalServerError)
+ return
+ }
+ jsonResponse(w, feeds)
+
+ case http.MethodPost:
+ var f feed.Feed
+ if err := json.NewDecoder(r.Body).Decode(&f); err != nil {
+ jsonError(w, "invalid json", http.StatusBadRequest)
+ return
+ }
+ if f.Url == "" {
+ jsonError(w, "url required", http.StatusBadRequest)
+ return
+ }
+ err := feed.NewFeed(f.Url)
+ if err != nil {
+ jsonError(w, "failed to create feed", http.StatusInternalServerError)
+ return
+ }
+ f.ByUrl(f.Url)
+ ch := make(chan string)
+ go func() {
+ crawler.CrawlFeed(&f, ch)
+ log.Println(<-ch)
+ }()
+ w.WriteHeader(http.StatusCreated)
+ jsonResponse(w, f)
+
+ case http.MethodPut:
+ var f feed.Feed
+ if err := json.NewDecoder(r.Body).Decode(&f); err != nil {
+ jsonError(w, "invalid json", http.StatusBadRequest)
+ return
+ }
+ f.Update()
+ jsonResponse(w, f)
+
+ case http.MethodDelete:
+ idStr := strings.TrimPrefix(r.URL.Path, "/feed/")
+ id, _ := strconv.ParseInt(idStr, 10, 64)
+ if id == 0 {
+ jsonError(w, "invalid feed id", http.StatusBadRequest)
+ return
+ }
+ f := &feed.Feed{Id: id}
+ f.Delete()
+ w.WriteHeader(http.StatusNoContent)
+
+ default:
+ jsonError(w, "method not allowed", http.StatusMethodNotAllowed)
+ }
+}
+
+func HandleCategory(w http.ResponseWriter, r *http.Request) {
+ if r.Method != http.MethodGet {
+ jsonError(w, "method not allowed", http.StatusMethodNotAllowed)
+ return
+ }
+ categories, err := feed.Categories()
+ if err != nil {
+ log.Println(err)
+ jsonError(w, "failed to fetch categories", http.StatusInternalServerError)
+ return
+ }
+ jsonResponse(w, categories)
+}
+
+func HandleExport(w http.ResponseWriter, r *http.Request) {
+ if r.Method != http.MethodGet {
+ jsonError(w, "method not allowed", http.StatusMethodNotAllowed)
+ return
+ }
+ format := strings.TrimPrefix(r.URL.Path, "/export/")
+ if format == "" {
+ jsonError(w, "format required", http.StatusBadRequest)
+ return
+ }
+ w.Header().Set("Content-Type", "text/plain") // exporter handles formats internally
+ w.Write([]byte(exporter.ExportFeeds(format)))
+}
+
+func HandleCrawl(w http.ResponseWriter, r *http.Request) {
+ if r.Method != http.MethodPost {
+ jsonError(w, "method not allowed", http.StatusMethodNotAllowed)
+ return
+ }
+ go crawler.Crawl()
+ jsonResponse(w, map[string]string{"message": "crawl started"})
+}
diff --git a/api/api_test.go b/api/api_test.go
new file mode 100644
index 0000000..45b0123
--- /dev/null
+++ b/api/api_test.go
@@ -0,0 +1,151 @@
+package api
+
+import (
+ "bytes"
+ "encoding/json"
+ "net/http"
+ "net/http/httptest"
+ "strconv"
+ "testing"
+
+ "adammathes.com/neko/config"
+ "adammathes.com/neko/models"
+ "adammathes.com/neko/models/feed"
+ "adammathes.com/neko/models/item"
+)
+
+func setupTestDB(t *testing.T) {
+ t.Helper()
+ config.Config.DBFile = ":memory:"
+ models.InitDB()
+ t.Cleanup(func() {
+ if models.DB != nil {
+ models.DB.Close()
+ }
+ })
+}
+
+func seedData(t *testing.T) {
+ t.Helper()
+ f := &feed.Feed{Url: "http://example.com", Title: "Test Feed", Category: "tech"}
+ f.Create()
+
+ i := &item.Item{
+ Title: "Test Item",
+ Url: "http://example.com/1",
+ FeedId: f.Id,
+ }
+ i.Create()
+}
+
+func TestStream(t *testing.T) {
+ setupTestDB(t)
+ seedData(t)
+ router := NewRouter()
+
+ req := httptest.NewRequest("GET", "/stream", nil)
+ rr := httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ if rr.Code != http.StatusOK {
+ t.Errorf("expected 200, got %d", rr.Code)
+ }
+
+ var items []item.Item
+ json.NewDecoder(rr.Body).Decode(&items)
+ if len(items) != 1 {
+ t.Errorf("expected 1 item, got %d", len(items))
+ }
+}
+
+func TestFeedCRUD(t *testing.T) {
+ setupTestDB(t)
+ router := NewRouter()
+
+ // Create
+ f := feed.Feed{Url: "http://example.com", Title: "New Feed"}
+ b, _ := json.Marshal(f)
+ req := httptest.NewRequest("POST", "/feed", bytes.NewBuffer(b))
+ rr := httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ if rr.Code != http.StatusCreated {
+ t.Errorf("expected 201, got %d", rr.Code)
+ }
+
+ // List
+ req = httptest.NewRequest("GET", "/feed", nil)
+ rr = httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ var feeds []feed.Feed
+ json.NewDecoder(rr.Body).Decode(&feeds)
+ if len(feeds) != 1 {
+ t.Errorf("expected 1 feed, got %d", len(feeds))
+ }
+
+ feedID := feeds[0].Id
+
+ // Update
+ feeds[0].Title = "Updated Title"
+ b, _ = json.Marshal(feeds[0])
+ req = httptest.NewRequest("PUT", "/feed", bytes.NewBuffer(b))
+ rr = httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ if rr.Code != http.StatusOK {
+ t.Errorf("expected 200, got %d", rr.Code)
+ }
+
+ // Delete
+ req = httptest.NewRequest("DELETE", "/feed/"+strconv.FormatInt(feedID, 10), nil)
+ rr = httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ if rr.Code != http.StatusNoContent {
+ t.Errorf("expected 204, got %d", rr.Code)
+ }
+}
+
+func TestItemUpdate(t *testing.T) {
+ setupTestDB(t)
+ seedData(t)
+ router := NewRouter()
+
+ // Get an item first to know its ID
+ var id int64
+ err := models.DB.QueryRow("SELECT id FROM item LIMIT 1").Scan(&id)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ i := item.Item{Id: id, ReadState: true}
+ b, _ := json.Marshal(i)
+ req := httptest.NewRequest("PUT", "/item/"+strconv.FormatInt(id, 10), bytes.NewBuffer(b))
+ rr := httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ if rr.Code != http.StatusOK {
+ t.Errorf("expected 200, got %d", rr.Code)
+ }
+}
+
+func TestGetCategories(t *testing.T) {
+ setupTestDB(t)
+ seedData(t)
+ router := NewRouter()
+
+ req := httptest.NewRequest("GET", "/tag", nil)
+ rr := httptest.NewRecorder()
+ router.ServeHTTP(rr, req)
+
+ if rr.Code != http.StatusOK {
+ t.Errorf("expected 200, got %d", rr.Code)
+ }
+
+ var cats []feed.Category
+ json.NewDecoder(rr.Body).Decode(&cats)
+ if len(cats) != 1 {
+ t.Errorf("expected 1 category, got %d", len(cats))
+ }
+}