From 82a0888c42570c353742e7e7f78e8ee30bd91dda Mon Sep 17 00:00:00 2001 From: Dmitry Verkhoturov Date: Tue, 7 Nov 2023 00:12:07 +0100 Subject: [PATCH] add pagination to GET /api/v1/find endpoint `format=tree` pagination provides top-level comments with all their replies and returns the last top-level comment as `last_comment` to be used as `offset_id` for the next page. If a top-level comment with its replies would overflow the limit, that comment is not returned. The only exception is the first top-level comment after the given offset: if it and its replies alone overflow the limit, it is still returned with all its replies. `format=plain` pagination provides comments up to the limit and returns the last comment as `last_comment` to be used as `offset_id` for the next page. --- backend/app/rest/api/rest_public.go | 64 +++++++++++- backend/app/rest/api/rest_public_test.go | 122 +++++++++++++++++++---- backend/app/store/comment.go | 12 ++- backend/app/store/service/tree.go | 94 ++++++++++++++++- backend/app/store/service/tree_test.go | 28 +++--- backend/remark.rest | 6 ++ 6 files changed, 284 insertions(+), 42 deletions(-) diff --git a/backend/app/rest/api/rest_public.go b/backend/app/rest/api/rest_public.go index bb7023a97c..ebecff7ab0 100644 --- a/backend/app/rest/api/rest_public.go +++ b/backend/app/rest/api/rest_public.go @@ -16,6 +16,7 @@ import ( cache "github.com/go-pkgz/lcw/v2" log "github.com/go-pkgz/lgr" R "github.com/go-pkgz/rest" + "github.com/google/uuid" "github.com/skip2/go-qrcode" "github.com/umputun/remark42/backend/app/rest" @@ -48,10 +49,18 @@ type pubStore interface { Counts(siteID string, postIDs []string) ([]store.PostInfo, error) } -// GET /find?site=siteID&url=post-url&format=[tree|plain]&sort=[+/-time|+/-score|+/-controversy]&view=[user|all]&since=unix_ts_msec -// find comments for given post. Returns in tree or plain formats, sorted +// GET /find?site=siteID&url=post-url&format=[tree|plain]&sort=[+/-time|+/-score|+/-controversy]&view=[user|all]&since=unix_ts_msec&limit=100&offset_id={id} +// find comments for given post. Returns in tree or plain formats, sorted. // // When `url` parameter is not set (e.g. request is for site-wide comments), does not return deleted comments. // +// When `limit` is set, the first {limit} comments are returned. When `offset_id` is set, comments are returned starting +// after the comment with the given id. +// format="tree" limits output by top-level comments with all their replies, +// and never returns a parent comment with only part of its replies. +// +// `count` in the response refers to the total number of non-deleted comments, +// `count_left` to the number of comments left to be returned, _including deleted_ ones. 
func (s *public) findCommentsCtrl(w http.ResponseWriter, r *http.Request) { locator := store.Locator{SiteID: r.URL.Query().Get("site"), URL: r.URL.Query().Get("url")} sort := r.URL.Query().Get("sort") @@ -70,7 +79,24 @@ func (s *public) findCommentsCtrl(w http.ResponseWriter, r *http.Request) { since = time.Time{} // since doesn't make sense for tree } - log.Printf("[DEBUG] get comments for %+v, sort %s, format %s, since %v", locator, sort, format, since) + limitParam := r.URL.Query().Get("limit") + var limit int + if limitParam != "" { + if limit, err = strconv.Atoi(limitParam); err != nil { + rest.SendErrorJSON(w, r, http.StatusBadRequest, err, "bad limit value", rest.ErrCommentNotFound) + return + } + } + + offsetID := r.URL.Query().Get("offset_id") + if offsetID != "" { + if _, err = uuid.Parse(offsetID); err != nil { + rest.SendErrorJSON(w, r, http.StatusBadRequest, err, "bad offset_id value", rest.ErrCommentNotFound) + return + } + } + + log.Printf("[DEBUG] get comments for %+v, sort %s, format %s, since %v, limit %d, offset %s", locator, sort, format, since, limit, offsetID) key := cache.NewKey(locator.SiteID).ID(URLKeyWithUser(r)).Scopes(locator.SiteID, locator.URL) data, err := s.cache.Get(key, func() ([]byte, error) { @@ -102,12 +128,20 @@ func (s *public) findCommentsCtrl(w http.ResponseWriter, r *http.Request) { var b []byte switch format { case "tree": - withInfo := treeWithInfo{Tree: service.MakeTree(comments, sort), Info: commentsInfo} + withInfo := treeWithInfo{Tree: service.MakeTree(comments, sort, limit, offsetID), Info: commentsInfo} + withInfo.Info.CountLeft = withInfo.Tree.CountLeft() + withInfo.Info.LastComment = withInfo.Tree.LastComment() if withInfo.Nodes == nil { // eliminate json nil serialization withInfo.Nodes = []*service.Node{} } b, e = encodeJSONWithHTML(withInfo) default: + if limit > 0 || offsetID != "" { + comments, commentsInfo.CountLeft = limitComments(comments, limit, offsetID) + } + if limit > 0 && len(comments) > 0 { + commentsInfo.LastComment = comments[len(comments)-1].ID + } withInfo := commentsWithInfo{Comments: comments, Info: commentsInfo} b, e = encodeJSONWithHTML(withInfo) } @@ -432,3 +466,25 @@ func (s *public) parseSince(r *http.Request) (time.Time, error) { } return sinceTS, nil } + +// limitComments returns limited list of comments and count of comments left after limit. +// If offsetID is provided, the list will be sliced starting from the comment with this ID. +// If offsetID is not found, the full list will be returned. 
+// It's used only for the plain format, as the tree format applies its own limit inside MakeTree. +func limitComments(c []store.Comment, limit int, offsetID string) (comments []store.Comment, countLeft int) { + if offsetID != "" { + for i, comment := range c { + if comment.ID == offsetID { + c = c[i+1:] + break + } + } + } + + if limit > 0 && len(c) > limit { + countLeft = len(c) - limit + c = c[:limit] + } + + return c, countLeft +} diff --git a/backend/app/rest/api/rest_public_test.go b/backend/app/rest/api/rest_public_test.go index 8e05e43ec7..deddd96a59 100644 --- a/backend/app/rest/api/rest_public_test.go +++ b/backend/app/rest/api/rest_public_test.go @@ -14,6 +14,7 @@ import ( cache "github.com/go-pkgz/lcw/v2" R "github.com/go-pkgz/rest" + "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -598,7 +599,7 @@ func TestPublic_FindCommentsCtrl_ConsistentCount(t *testing.T) { setScore(commentLocator, ids[4], -3) time.Sleep(time.Millisecond * 5) - c6 := store.Comment{Text: "third-level comment 2", ParentID: ids[4], Locator: commentLocator} + c6 := store.Comment{Text: "deleted third-level comment 2", ParentID: ids[4], Locator: commentLocator} ids[5], timestamps[5] = addCommentGetCreatedTime(t, c6, ts) // deleted later so not visible in site-wide requests setScore(commentLocator, ids[5], 10) @@ -612,7 +613,7 @@ func TestPublic_FindCommentsCtrl_ConsistentCount(t *testing.T) { setScore(commentLocator, ids[6], 1) time.Sleep(time.Millisecond * 5) - c8 := store.Comment{Text: "second-level comment 3", ParentID: ids[6], Locator: commentLocator} + c8 := store.Comment{Text: "deleted second-level comment 3", ParentID: ids[6], Locator: commentLocator} ids[7], timestamps[7] = addCommentGetCreatedTime(t, c8, ts) // deleted later so not visible in site-wide requests setScore(commentLocator, ids[7], -20) @@ -646,18 +647,19 @@ func TestPublic_FindCommentsCtrl_ConsistentCount(t *testing.T) { params string expectedBody string }{ - {"", fmt.Sprintf(`"info":{"count":7,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, - {"url=test-url", fmt.Sprintf(`"info":{"url":"test-url","count":6,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, - {"format=plain", fmt.Sprintf(`"info":{"count":7,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, - {"format=plain&url=test-url", fmt.Sprintf(`"info":{"url":"test-url","count":6,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, - {"since=" + sinceTenSecondsAgo, fmt.Sprintf(`"info":{"count":7,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, - {"url=test-url&since=" + sinceTenSecondsAgo, fmt.Sprintf(`"info":{"url":"test-url","count":6,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, - {"since=" + sinceTS[0], fmt.Sprintf(`"info":{"count":7,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, - {"url=test-url&since=" + sinceTS[0], fmt.Sprintf(`"info":{"url":"test-url","count":6,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, - {"since=" + sinceTS[1], fmt.Sprintf(`"info":{"count":6,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, - {"url=test-url&since=" + sinceTS[1], fmt.Sprintf(`"info":{"url":"test-url","count":5,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, - {"since=" + sinceTS[4], fmt.Sprintf(`"info":{"count":3,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, - {"url=test-url&since=" + sinceTS[4], fmt.Sprintf(`"info":{"url":"test-url","count":2,"first_time":%q,"last_time":%q}`, formattedTS[0], 
formattedTS[7])}, + // test parameters url, format, since, sort + {"", fmt.Sprintf(`"info":{"count":7,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, + {"url=test-url", fmt.Sprintf(`"info":{"url":"test-url","count":6,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, + {"format=plain", fmt.Sprintf(`"info":{"count":7,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, + {"format=plain&url=test-url", fmt.Sprintf(`"info":{"url":"test-url","count":6,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, + {"since=" + sinceTenSecondsAgo, fmt.Sprintf(`"info":{"count":7,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, + {"url=test-url&since=" + sinceTenSecondsAgo, fmt.Sprintf(`"info":{"url":"test-url","count":6,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, + {"since=" + sinceTS[0], fmt.Sprintf(`"info":{"count":7,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, + {"url=test-url&since=" + sinceTS[0], fmt.Sprintf(`"info":{"url":"test-url","count":6,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, + {"since=" + sinceTS[1], fmt.Sprintf(`"info":{"count":6,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, + {"url=test-url&since=" + sinceTS[1], fmt.Sprintf(`"info":{"url":"test-url","count":5,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, + {"since=" + sinceTS[4], fmt.Sprintf(`"info":{"count":3,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[8])}, + {"url=test-url&since=" + sinceTS[4], fmt.Sprintf(`"info":{"url":"test-url","count":2,"count_left":0,"first_time":%q,"last_time":%q}`, formattedTS[0], formattedTS[7])}, {"format=tree", `"info":{"count":7`}, {"format=tree&url=test-url", `"info":{"url":"test-url","count":6`}, {"format=tree&sort=+time", `"info":{"count":7`}, @@ -677,19 +679,103 @@ func TestPublic_FindCommentsCtrl_ConsistentCount(t *testing.T) { // three comments of which last one deleted and doesn't have controversy so returned last {"sort=-controversy&url=test-url&since=" + sinceTS[5], fmt.Sprintf(`"score":0,"vote":0,"time":%q,"delete":true}],"info":{"url":"test-url","count":1`, formattedTS[7])}, // test readonly status for the post without comments - {"url=readonly-test", `"info":{"count":0,"read_only":true`}, - {"format=tree&url=readonly-test", `"info":{"count":0,"read_only":true`}, + {"url=readonly-test", `"info":{"count":0,"count_left":0,"read_only":true`}, + {"format=tree&url=readonly-test", `"info":{"count":0,"count_left":0,"read_only":true`}, + + // test parameters limit, offset_id for format=plain + {"limit=bad", `{"code":1,"details":"bad limit value","error":"strconv.Atoi: parsing \"bad\": invalid syntax"}`}, + {"offset_id=bad", `{"code":1,"details":"bad offset_id value","error":"invalid UUID length: 3"}`}, + {"limit=2", `"info":{"count":7,"count_left":5,"last_comment":"` + ids[1]}, + {"limit=6", `"info":{"count":7,"count_left":1,"last_comment":"` + ids[6]}, + {"limit=7", `"info":{"count":7,"count_left":0,"last_comment":"` + ids[8]}, + {"limit=2&url=test-url", `"info":{"url":"test-url","count":6,"count_left":6,"last_comment":"` + ids[1]}, + {"limit=6&url=test-url", `"info":{"url":"test-url","count":6,"count_left":2,"last_comment":"` + ids[5]}, + {"limit=7&url=test-url", 
`"info":{"url":"test-url","count":6,"count_left":1,"last_comment":"` + ids[6]}, + {fmt.Sprintf("limit=2&offset_id=%s", ids[2]), `"info":{"count":7,"count_left":2,"last_comment":"` + ids[4]}, + {fmt.Sprintf("limit=2&offset_id=%s", ids[3]), `"info":{"count":7,"count_left":1,"last_comment":"` + ids[6]}, + {fmt.Sprintf("limit=2&offset_id=%s", ids[4]), `"info":{"count":7,"count_left":0`}, + {fmt.Sprintf("limit=1&offset_id=%s", ids[6]), `"info":{"count":7,"count_left":0`}, + {fmt.Sprintf("limit=2&offset_id=%s", ids[8]), `"info":{"count":7,"count_left":0`}, + {fmt.Sprintf("limit=2&url=test-url&offset_id=%s", ids[2]), `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[4]}, + {fmt.Sprintf("limit=2&url=test-url&offset_id=%s", ids[3]), `"info":{"url":"test-url","count":6,"count_left":2,"last_comment":"` + ids[5]}, + {fmt.Sprintf("limit=2&url=test-url&offset_id=%s", ids[4]), `"info":{"url":"test-url","count":6,"count_left":1,"last_comment":"` + ids[6]}, + {fmt.Sprintf("limit=1&url=test-url&offset_id=%s", ids[6]), `"info":{"url":"test-url","count":6,"count_left":0,"last_comment":"` + ids[7]}, + {fmt.Sprintf("limit=2&url=test-url&offset_id=%s", ids[8]), `"info":{"url":"test-url","count":6,"count_left":6,`}, + // deleted comment, offset is ignored in site-wide request but not for particular URL + {fmt.Sprintf("limit=2&offset_id=%s", ids[5]), `"info":{"count":7,"count_left":5,"last_comment":"` + ids[1]}, + {fmt.Sprintf("limit=2&url=test-url&offset_id=%s", ids[5]), `"info":{"url":"test-url","count":6,"count_left":0,"last_comment":"` + ids[7]}, + // non-existing comment, offset is ignored, deleted comments included into request with "url" + {fmt.Sprintf("limit=1&offset_id=%s", uuid.New().String()), `"info":{"count":7,"count_left":6,"last_comment":"` + ids[0]}, + {fmt.Sprintf("limit=1&url=test-url&offset_id=%s", uuid.New().String()), `"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[0]}, + // since is ignored for tree format, so we test it only for plain + {"limit=6&since=" + sinceTenSecondsAgo, `"info":{"count":7,"count_left":1,"last_comment":"` + ids[6]}, + {"limit=1&since=" + sinceTS[4], `"info":{"count":3,"count_left":2,"last_comment":"` + ids[4]}, + {"limit=6&url=test-url&since=" + sinceTenSecondsAgo, `"info":{"url":"test-url","count":6,"count_left":2,"last_comment":"` + ids[5]}, + {"limit=1&url=test-url&since=" + sinceTS[4], `"info":{"url":"test-url","count":2,"count_left":3,"last_comment":"` + ids[4]}, + // start with deleted comment timestamp + {"limit=1&since=" + sinceTS[5], `"info":{"count":2,"count_left":1,"last_comment":"` + ids[6]}, + {"limit=1&since=" + sinceTS[6], `"info":{"count":2,"count_left":1,"last_comment":"` + ids[6]}, + {"limit=1&url=test-url&since=" + sinceTS[5], `"info":{"url":"test-url","count":1,"count_left":2,"last_comment":"` + ids[5]}, + {"limit=1&url=test-url&since=" + sinceTS[6], `"info":{"url":"test-url","count":1,"count_left":1,"last_comment":"` + ids[6]}, + // test sort + {"limit=1&sort=+time&url=test-url", `"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[0]}, + {"limit=1&sort=-time&url=test-url", `"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[7]}, + {"limit=1&sort=+score&url=test-url", `"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[6]}, + {"limit=1&sort=-score&url=test-url", `"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[2]}, + {"limit=1&sort=+controversy&url=test-url", 
`"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[0]}, + {"limit=1&sort=-controversy&url=test-url", `"info":{"url":"test-url","count":6,"count_left":7,"last_comment":"` + ids[3]}, + + // test parameters limit, offset_id for format=tree + {"format=tree&limit=bad", `{"code":1,"details":"bad limit value","error":"strconv.Atoi: parsing \"bad\": invalid syntax"}`}, + {"format=tree&offset_id=bad", `{"code":1,"details":"bad offset_id value","error":"invalid UUID length: 3"}`}, + {"format=tree&limit=2", `"info":{"count":7,"count_left":4,"last_comment":"` + ids[0]}, + {"format=tree&limit=6", `"info":{"count":7,"count_left":2,"last_comment":"` + ids[1]}, + {"format=tree&limit=7", `"info":{"count":7,"count_left":1,"last_comment":"` + ids[6]}, + {"format=tree&url=test-url&limit=2", `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {"format=tree&url=test-url&limit=6", `"info":{"url":"test-url","count":6,"count_left":1,"last_comment":"` + ids[1]}, + {"format=tree&url=test-url&limit=7", `"info":{"url":"test-url","count":6,"count_left":0,"last_comment":"` + ids[6]}, + // start after first top-level comment + {fmt.Sprintf("format=tree&limit=2&offset_id=%s", ids[0]), `"info":{"count":7,"count_left":2,"last_comment":"` + ids[1]}, + {fmt.Sprintf("format=tree&url=test-url&limit=2&offset_id=%s", ids[0]), `"info":{"url":"test-url","count":6,"count_left":1,"last_comment":"` + ids[1]}, + // start after second top-level comment + {fmt.Sprintf("format=tree&limit=2&offset_id=%s", ids[1]), `"info":{"count":7,"count_left":1,"last_comment":"` + ids[6]}, + {fmt.Sprintf("format=tree&url=test-url&limit=2&offset_id=%s", ids[1]), `"info":{"url":"test-url","count":6,"count_left":0,"last_comment":"` + ids[6]}, + // start after third top-level comment, so expect comment to post 2, or no comments on post 1 if "url" is set + {fmt.Sprintf("format=tree&limit=1&offset_id=%s", ids[6]), `"info":{"count":7,"count_left":0,"last_comment":"` + ids[8]}, + {fmt.Sprintf("format=tree&url=test-url&limit=1&offset_id=%s", ids[6]), `"info":{"url":"test-url","count":6,"count_left":0`}, + // non-root comment IDs or non-existing IDs are ignored + {fmt.Sprintf("format=tree&limit=2&offset_id=%s", ids[2]), `"info":{"count":7,"count_left":4,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&limit=2&offset_id=%s", ids[3]), `"info":{"count":7,"count_left":4,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&limit=2&offset_id=%s", ids[4]), `"info":{"count":7,"count_left":4,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&limit=2&offset_id=%s", ids[7]), `"info":{"count":7,"count_left":4,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&limit=1&offset_id=%s", uuid.New().String()), `"info":{"count":7,"count_left":4,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&url=test-url&limit=2&offset_id=%s", ids[2]), `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&url=test-url&limit=2&offset_id=%s", ids[3]), `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&url=test-url&limit=2&offset_id=%s", ids[4]), `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&url=test-url&limit=2&offset_id=%s", ids[7]), `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {fmt.Sprintf("format=tree&url=test-url&limit=1&offset_id=%s", uuid.New().String()), 
`"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + // test sort + {"format=tree&limit=1&sort=+time&url=test-url", `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {"format=tree&limit=1&sort=-time&url=test-url", `"info":{"url":"test-url","count":6,"count_left":5,"last_comment":"` + ids[6]}, + {"format=tree&limit=1&sort=+score&url=test-url", `"info":{"url":"test-url","count":6,"count_left":5,"last_comment":"` + ids[6]}, + {"format=tree&limit=1&sort=-score&url=test-url", `"info":{"url":"test-url","count":6,"count_left":4,"last_comment":"` + ids[1]}, + {"format=tree&limit=1&sort=+controversy&url=test-url", `"info":{"url":"test-url","count":6,"count_left":3,"last_comment":"` + ids[0]}, + {"format=tree&limit=1&sort=-controversy&url=test-url", `"info":{"url":"test-url","count":6,"count_left":5,"last_comment":"` + ids[6]}, } for _, tc := range testCases { t.Run(tc.params, func(t *testing.T) { url := fmt.Sprintf(ts.URL+"/api/v1/find?site=remark42&%s", tc.params) body, code := get(t, url) - assert.Equal(t, http.StatusOK, code) + expectedStatus := http.StatusOK + if strings.Contains(tc.params, "=bad") { + expectedStatus = http.StatusBadRequest + } + assert.Equal(t, expectedStatus, code) assert.Contains(t, body, tc.expectedBody) t.Log(body) // prevent hit limiter from engaging - time.Sleep(50 * time.Millisecond) + time.Sleep(80 * time.Millisecond) }) } } diff --git a/backend/app/store/comment.go b/backend/app/store/comment.go index 877f942a27..b23eb58037 100644 --- a/backend/app/store/comment.go +++ b/backend/app/store/comment.go @@ -45,11 +45,13 @@ type Edit struct { // PostInfo holds summary for given post url type PostInfo struct { - URL string `json:"url,omitempty"` // can be attached to site-wide comments but won't be set then - Count int `json:"count"` - ReadOnly bool `json:"read_only,omitempty" bson:"read_only,omitempty"` // can be attached to site-wide comments but won't be set then - FirstTS time.Time `json:"first_time,omitempty" bson:"first_time,omitempty"` - LastTS time.Time `json:"last_time,omitempty" bson:"last_time,omitempty"` + URL string `json:"url,omitempty"` // can be attached to site-wide comments but won't be set then + Count int `json:"count"` + CountLeft int `json:"count_left"` // used only with returning search results limited by number, otherwise zero + LastComment string `json:"last_comment,omitempty"` // used only with returning search results limited by number + ReadOnly bool `json:"read_only,omitempty" bson:"read_only,omitempty"` // can be attached to site-wide comments but won't be set then + FirstTS time.Time `json:"first_time,omitempty" bson:"first_time,omitempty"` + LastTS time.Time `json:"last_time,omitempty" bson:"last_time,omitempty"` } // BlockedUser holds id and ts for blocked user diff --git a/backend/app/store/service/tree.go b/backend/app/store/service/tree.go index d6ffdc7649..276392ac36 100644 --- a/backend/app/store/service/tree.go +++ b/backend/app/store/service/tree.go @@ -11,6 +11,9 @@ import ( // Tree is formatter making tree from the list of comments type Tree struct { Nodes []*Node `json:"comments"` + + countLeft int + lastLimitedComment string } // Node is a comment with optional replies @@ -29,7 +32,7 @@ type recurData struct { } // MakeTree gets unsorted list of comments and produces Tree -func MakeTree(comments []store.Comment, sortType string) *Tree { +func MakeTree(comments []store.Comment, sortType string, limit int, offsetID string) *Tree { if len(comments) == 0 { return &Tree{} } 
@@ -53,9 +56,20 @@ func MakeTree(comments []store.Comment, sortType string) *Tree { } res.sortNodes(sortType) + res.limit(limit, offsetID) return &res } +// CountLeft returns number of comments left after limit, 0 if no limit was set +func (t *Tree) CountLeft() int { + return t.countLeft +} + +// LastComment returns ID of the last comment in the tree after limit, empty string if no limit was set +func (t *Tree) LastComment() string { + return t.lastLimitedComment +} + // proc makes tree for one top-level comment recursively func (t *Tree) proc(comments []store.Comment, node *Node, rd *recurData, parentID string) (result *Node) { if rd.tsModified.IsZero() || rd.tsCreated.IsZero() { @@ -145,3 +159,81 @@ func (t *Tree) sortNodes(sortType string) { } }) } + +// limit limits the number of comments in the tree and sets countLeft and lastLimitedComment, +// starting with the comment right after offsetID. +// +// If offsetID is empty or invalid, it starts from the beginning. If limit is 0, it doesn't limit anything. +// +// Limit is applied to top-level comments only, so top-level comments are always returned with all their replies, +// and lastLimitedComment is set to the last top-level comment, not to its last reply. +// +// If limit is less than the size of the first top-level comment after the given offset (the comment plus its replies), +// that first comment is still returned completely with all its replies. +func (t *Tree) limit(limit int, offsetID string) { + if offsetID == "" && limit <= 0 { + return + } + + start := 0 + if offsetID != "" { + for i, n := range t.Nodes { + if n.Comment.ID == offsetID { + start = i + 1 + break + } + } + } + + if start == len(t.Nodes) { // If the start index is beyond the available nodes, clear the nodes + t.Nodes = []*Node{} + return + } + + t.Nodes = t.Nodes[start:] + + // if there is only offset and no limit, there are no comments left and no point in returning + // the last comment ID as there are no comments beyond it. + if limit <= 0 { + return + } + + // Traverse and limit the number of top-level nodes, including their replies + limitedNodes := []*Node{} + commentsCount := 0 + + for _, node := range t.Nodes { + repliesCount := countReplies(node) + 1 // Count this node and its replies + + // If the limit is already reached or exceeded, calculate countLeft and move to the next node + if commentsCount >= limit { + t.countLeft += repliesCount + continue + } + + // Check if we just exceeded the limit and there are already some nodes in the list, + // as otherwise we would have to return the first node with all its replies even if it exceeds the limit. + if commentsCount+repliesCount >= limit && len(limitedNodes) > 0 { + t.countLeft += repliesCount + commentsCount = limit // Adjust commentsCount to stop checking limit for the next nodes + continue + } + + // Add the node and its replies to the list + limitedNodes = append(limitedNodes, node) + commentsCount += repliesCount + } + + t.lastLimitedComment = limitedNodes[len(limitedNodes)-1].Comment.ID + t.Nodes = limitedNodes +} + +// countReplies counts the total number of replies recursively for a given node. 
+func countReplies(node *Node) int { + count := 0 + for _, reply := range node.Replies { + count++ // Count the reply itself + count += countReplies(reply) // Recursively count its replies + } + return count +} diff --git a/backend/app/store/service/tree_test.go b/backend/app/store/service/tree_test.go index c7e90fc751..260d269390 100644 --- a/backend/app/store/service/tree_test.go +++ b/backend/app/store/service/tree_test.go @@ -37,14 +37,14 @@ func TestMakeTree(t *testing.T) { {Locator: loc, ID: "611", ParentID: "61", Deleted: true}, } - res := MakeTree(comments, "time") + res := MakeTree(comments, "time", 0, "") resJSON, err := json.Marshal(&res) require.NoError(t, err) expJSON := mustLoadJSONFile(t, "testdata/tree.json") assert.Equal(t, expJSON, resJSON) - res = MakeTree([]store.Comment{}, "time") + res = MakeTree([]store.Comment{}, "time", 0, "") assert.Equal(t, &Tree{}, res) } @@ -75,7 +75,7 @@ func TestMakeEmptySubtree(t *testing.T) { {Locator: loc, ID: "3", Timestamp: ts(48, 1), Deleted: true}, // deleted top level } - res := MakeTree(comments, "time") + res := MakeTree(comments, "time", 0, "") resJSON, err := json.Marshal(&res) require.NoError(t, err) t.Log(string(resJSON)) @@ -104,50 +104,50 @@ func TestTreeSortNodes(t *testing.T) { {ID: "5", Deleted: true, Timestamp: time.Date(2017, 12, 25, 19, 47, 22, 150, time.UTC)}, } - res := MakeTree(comments, "+active") + res := MakeTree(comments, "+active", 0, "") assert.Equal(t, "2", res.Nodes[0].Comment.ID) t.Log(res.Nodes[0].Comment.ID, res.Nodes[0].tsModified) - res = MakeTree(comments, "-active") + res = MakeTree(comments, "-active", 0, "") t.Log(res.Nodes[0].Comment.ID, res.Nodes[0].tsModified) assert.Equal(t, "1", res.Nodes[0].Comment.ID) - res = MakeTree(comments, "+time") + res = MakeTree(comments, "+time", 0, "") t.Log(res.Nodes[0].Comment.ID, res.Nodes[0].tsModified) assert.Equal(t, "1", res.Nodes[0].Comment.ID) - res = MakeTree(comments, "-time") + res = MakeTree(comments, "-time", 0, "") assert.Equal(t, "6", res.Nodes[0].Comment.ID) - res = MakeTree(comments, "score") + res = MakeTree(comments, "score", 0, "") assert.Equal(t, "4", res.Nodes[0].Comment.ID) assert.Equal(t, "3", res.Nodes[1].Comment.ID) assert.Equal(t, "6", res.Nodes[2].Comment.ID) assert.Equal(t, "1", res.Nodes[3].Comment.ID) - res = MakeTree(comments, "+score") + res = MakeTree(comments, "+score", 0, "") assert.Equal(t, "4", res.Nodes[0].Comment.ID) - res = MakeTree(comments, "-score") + res = MakeTree(comments, "-score", 0, "") assert.Equal(t, "2", res.Nodes[0].Comment.ID) assert.Equal(t, "1", res.Nodes[1].Comment.ID) assert.Equal(t, "3", res.Nodes[2].Comment.ID) assert.Equal(t, "6", res.Nodes[3].Comment.ID) - res = MakeTree(comments, "+controversy") + res = MakeTree(comments, "+controversy", 0, "") assert.Equal(t, "3", res.Nodes[0].Comment.ID) assert.Equal(t, "6", res.Nodes[1].Comment.ID) assert.Equal(t, "2", res.Nodes[2].Comment.ID) assert.Equal(t, "4", res.Nodes[3].Comment.ID) assert.Equal(t, "1", res.Nodes[4].Comment.ID) - res = MakeTree(comments, "-controversy") + res = MakeTree(comments, "-controversy", 0, "") assert.Equal(t, "1", res.Nodes[0].Comment.ID) assert.Equal(t, "4", res.Nodes[1].Comment.ID) assert.Equal(t, "2", res.Nodes[2].Comment.ID) assert.Equal(t, "3", res.Nodes[3].Comment.ID) - res = MakeTree(comments, "undefined") + res = MakeTree(comments, "undefined", 0, "") t.Log(res.Nodes[0].Comment.ID, res.Nodes[0].tsModified) assert.Equal(t, "1", res.Nodes[0].Comment.ID) } @@ -160,7 +160,7 @@ func BenchmarkTree(b *testing.B) { assert.NoError(b, 
err) for i := 0; i < b.N; i++ { - res := MakeTree(comments, "time") + res := MakeTree(comments, "time", 0, "") assert.NotNil(b, res) } } diff --git a/backend/remark.rest b/backend/remark.rest index da088aedc8..33bb3c8ef9 100644 --- a/backend/remark.rest +++ b/backend/remark.rest @@ -5,6 +5,12 @@ GET {{host}}/api/v1/find?site={{site}}&sort=-time&format=tree&url={{url}} ### find request with plain GET {{host}}/api/v1/find?site={{site}}&sort=-controversy&format=plain&url={{url}} +### find first 10 comments for given URL +GET {{host}}/api/v1/find?site={{site}}&sort=-controversy&format=plain&url={{url}}&limit=10 + +### find 10 comments after the comment with the given offset_id (a comment UUID) for given URL +GET {{host}}/api/v1/find?site={{site}}&sort=-controversy&format=plain&url={{url}}&limit=10&offset_id={{offset_id}} + ### find request with plain. dev token for secret=12345, not admin GET {{host}}/api/v1/find?site={{site}}&sort=-controversy&format=plain&url={{url}} X-JWT: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOiJyZW1hcmsiLCJleHAiOjE5NzYwNTY3NTYsImp0aSI6IjJlOGJmMTE5OTI0MjQxMDRjYjFhZGRlODllMWYwNGFiMTg4YWZjMzQiLCJpYXQiOjE1NzYwNTY0NTYsImlzcyI6InJlbWFyazQyIiwidXNlciI6eyJuYW1lIjoiZGV2X3VzZXIiLCJpZCI6ImRldl91c2VyIiwicGljdHVyZSI6Imh0dHA6Ly8xMjcuMC4wLjE6ODA4MC9hcGkvdjEvYXZhdGFyL2NjZmEyYWJkMDE2Njc2MDViNGUxZmM0ZmNiOTFiMWUxYWYzMjMyNDAuaW1hZ2UiLCJhdHRycyI6eyJhZG1pbiI6dHJ1ZSwiYmxvY2tlZCI6ZmFsc2V9fX0.6Qt5s2enBMRC-Jmsua01yViVYI95Dx6BPBMaNjj36d4
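Usage sketch (illustration only, not part of the patch): a client pages through the plain format by requesting `limit` comments and feeding `last_comment` from the response back as `offset_id` until `count_left` reaches zero. The base URL, site and post values, and the pageResponse/pageInfo structs below are assumptions made for this sketch; only the limit/offset_id parameters and the count/count_left/last_comment fields come from this patch, and the sketch assumes the plain response keeps its usual comments/info envelope.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// pageInfo mirrors only the "info" fields needed for paging (ad-hoc for this sketch).
type pageInfo struct {
	Count       int    `json:"count"`
	CountLeft   int    `json:"count_left"`
	LastComment string `json:"last_comment"`
}

// pageResponse is an ad-hoc view of the plain format response envelope.
type pageResponse struct {
	Comments []json.RawMessage `json:"comments"`
	Info     pageInfo          `json:"info"`
}

func main() {
	base := "http://127.0.0.1:8080/api/v1/find" // assumed local remark42 instance
	offset := ""
	for {
		q := url.Values{}
		q.Set("site", "remark42")                // assumed site ID
		q.Set("url", "https://example.com/post") // assumed post URL
		q.Set("format", "plain")
		q.Set("limit", "10")
		if offset != "" {
			q.Set("offset_id", offset) // continue after the last comment of the previous page
		}
		resp, err := http.Get(base + "?" + q.Encode())
		if err != nil {
			panic(err)
		}
		var page pageResponse
		if err = json.NewDecoder(resp.Body).Decode(&page); err != nil {
			resp.Body.Close()
			panic(err)
		}
		resp.Body.Close()
		fmt.Printf("got %d comments, %d left\n", len(page.Comments), page.Info.CountLeft)
		if page.Info.CountLeft == 0 || page.Info.LastComment == "" {
			break // no more pages
		}
		offset = page.Info.LastComment // last_comment becomes the next page's offset_id
	}
}

The same loop would work for format=tree, where last_comment is the last returned top-level comment rather than the last comment overall.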