From 7c2d32202cde5dc4386bb021536c7989bf149af0 Mon Sep 17 00:00:00 2001 From: "Charles Le (cle-mbp-m3)" Date: Wed, 11 Mar 2026 18:34:43 -0700 Subject: [PATCH 1/4] feat: implement cursor-based pagination for Jira Cloud v3 API Jira Cloud deprecated /rest/api/2/search and the new v3 search endpoint uses cursor-based pagination via nextPageToken instead of offset-based startAt. The SearchResult struct already deserialized these fields but they were never used, causing --paginate offset to be silently ignored and limiting results to 100 max. Add SearchAll() that auto-paginates using nextPageToken until isLast is true or the requested limit is reached. Remove the hard cap of 100 in getPaginateParams() since SearchAll handles chunking internally. Fixes #898 Co-Authored-By: Claude Opus 4.6 --- api/client.go | 2 +- internal/cmd/epic/list/list.go | 4 +-- internal/cmd/issue/list/list.go | 2 +- internal/query/issue.go | 11 +++---- pkg/jira/search.go | 56 +++++++++++++++++++++++++++++++++ 5 files changed, 64 insertions(+), 11 deletions(-) diff --git a/api/client.go b/api/client.go index 683f4b16..333daa1c 100644 --- a/api/client.go +++ b/api/client.go @@ -138,7 +138,7 @@ func ProxySearch(c *jira.Client, jql string, from, limit uint) (*jira.SearchResu if it == jira.InstallationTypeLocal { issues, err = c.SearchV2(jql, from, limit) } else { - issues, err = c.Search(jql, limit) + issues, err = c.SearchAll(jql, limit) } return issues, err diff --git a/internal/cmd/epic/list/list.go b/internal/cmd/epic/list/list.go index 5ad6ac99..877426ad 100644 --- a/internal/cmd/epic/list/list.go +++ b/internal/cmd/epic/list/list.go @@ -106,7 +106,7 @@ func singleEpicView(flags query.FlagParser, key, project, projectType, server st q.Params().Parent = key q.Params().IssueType = "" - resp, err = client.Search(q.Get(), q.Params().Limit) + resp, err = client.SearchAll(q.Get(), q.Params().Limit) } else { resp, err = client.EpicIssues(key, q.Get(), q.Params().From, q.Params().Limit) } @@ -209,7 +209,7 
@@ func epicExplorerView(cmd *cobra.Command, flags query.FlagParser, project, proje q.Params().Parent = key q.Params().IssueType = "" - resp, err = client.Search(q.Get(), q.Params().Limit) + resp, err = client.SearchAll(q.Get(), q.Params().Limit) } else { resp, err = client.EpicIssues(key, "", q.Params().From, q.Params().Limit) } diff --git a/internal/cmd/issue/list/list.go b/internal/cmd/issue/list/list.go index efae6c49..f1b18866 100644 --- a/internal/cmd/issue/list/list.go +++ b/internal/cmd/issue/list/list.go @@ -238,7 +238,7 @@ func SetFlags(cmd *cobra.Command) { cmd.Flags().StringP("jql", "q", "", "Run a raw JQL query in a given project context") cmd.Flags().String("order-by", "created", "Field to order the list with") cmd.Flags().Bool("reverse", false, "Reverse the display order (default \"DESC\")") - cmd.Flags().String("paginate", "0:100", "Paginate the result. Max 100 at a time, format: <from>:<limit> where <from> is optional") + cmd.Flags().String("paginate", "0:100", "Paginate the result, format: <from>:<limit> where <from> is optional. Limits > 100 are auto-paginated") cmd.Flags().Bool("plain", false, "Display output in plain mode") cmd.Flags().Bool("no-headers", false, "Don't display table headers in plain mode. Works only with --plain") cmd.Flags().Bool("no-truncate", false, "Show all available columns in plain mode. 
Works only with --plain") diff --git a/internal/query/issue.go b/internal/query/issue.go index 73f63146..01a9f6f6 100644 --- a/internal/query/issue.go +++ b/internal/query/issue.go @@ -326,10 +326,6 @@ func getPaginateParams(paginate string) (uint, uint, error) { errInvalidPaginateArg = fmt.Errorf( "invalid argument for paginate: <limit> must be a positive integer in format <from>:<limit>, where <from> is optional", ) - errOutOfBounds = fmt.Errorf( - "invalid argument for paginate: Format <from>:<limit>, where <from> is optional and "+ - " <limit> must be between %d and %d (inclusive)", 1, defaultLimit, - ) ) paginate = strings.TrimSpace(paginate) @@ -360,12 +356,13 @@ func getPaginateParams(paginate string) (uint, uint, error) { } } + errOutOfBounds := fmt.Errorf( + "invalid argument for paginate: <from> and <limit> must be positive integers", + ) + if from < 0 || limit <= 0 { return 0, 0, errOutOfBounds } - if limit > defaultLimit { - return 0, 0, errOutOfBounds - } return uint(from), uint(limit), nil } diff --git a/pkg/jira/search.go b/pkg/jira/search.go index 71eedd80..a0ef22f3 100644 --- a/pkg/jira/search.go +++ b/pkg/jira/search.go @@ -15,12 +15,68 @@ type SearchResult struct { Issues []*Issue `json:"issues"` } +const maxPageSize uint = 100 + // Search searches for issues using v3 version of the Jira GET /search endpoint. +// This performs a single request and returns up to `limit` results. func (c *Client) Search(jql string, limit uint) (*SearchResult, error) { path := fmt.Sprintf("/search/jql?jql=%s&maxResults=%d&fields=*all", url.QueryEscape(jql), limit) return c.search(path, apiVersion3) } +// SearchAll searches for issues using v3 version of the Jira GET /search endpoint +// with automatic cursor-based pagination via nextPageToken. It fetches pages of up +// to 100 issues at a time until `totalLimit` issues are collected or all results +// are exhausted (isLast == true). 
+func (c *Client) SearchAll(jql string, totalLimit uint) (*SearchResult, error) { + var allIssues []*Issue + + pageSize := totalLimit + if pageSize > maxPageSize { + pageSize = maxPageSize + } + + nextPageToken := "" + for { + path := fmt.Sprintf("/search/jql?jql=%s&maxResults=%d&fields=*all", url.QueryEscape(jql), pageSize) + if nextPageToken != "" { + path += fmt.Sprintf("&nextPageToken=%s", url.QueryEscape(nextPageToken)) + } + + result, err := c.search(path, apiVersion3) + if err != nil { + return nil, err + } + + allIssues = append(allIssues, result.Issues...) + + if result.IsLast || result.NextPageToken == "" { + break + } + if totalLimit > 0 && uint(len(allIssues)) >= totalLimit { + break + } + + nextPageToken = result.NextPageToken + + // Adjust page size for the last page if needed. + remaining := totalLimit - uint(len(allIssues)) + if remaining < pageSize { + pageSize = remaining + } + } + + // Trim to totalLimit if we overshot. + if totalLimit > 0 && uint(len(allIssues)) > totalLimit { + allIssues = allIssues[:totalLimit] + } + + return &SearchResult{ + IsLast: true, + Issues: allIssues, + }, nil +} + // SearchV2 searches an issues using v2 version of the Jira GET /search endpoint. 
func (c *Client) SearchV2(jql string, from, limit uint) (*SearchResult, error) { path := fmt.Sprintf("/search?jql=%s&startAt=%d&maxResults=%d", url.QueryEscape(jql), from, limit) From f8b3102c663a51ac7d8279b0e3dbd01d63cae016 Mon Sep 17 00:00:00 2001 From: "Charles Le (cle-mbp-m3)" Date: Wed, 11 Mar 2026 18:40:58 -0700 Subject: [PATCH 2/4] test: add comprehensive tests for SearchAll cursor-based pagination Add 15 test cases for the new SearchAll() method covering: Happy paths: - Single page results (isLast=true on first response) - Two-page pagination with nextPageToken forwarding - Three-page pagination across multiple cursor tokens - Limit enforcement (stops fetching when limit reached) - Limit trimming when API returns more than requested - Limit less than page size (maxResults adjusted down) - Limit exceeds page size (maxResults capped at 100) - Page size adjustment for the final page Edge cases: - Empty results (no issues match JQL) - Empty nextPageToken with isLast=false (defensive stop) - Single issue total - NextPageToken URL encoding verification Error cases: - HTTP error on first page - HTTP error mid-pagination (second page fails) - Malformed JSON response Also add 15 test cases for getPaginateParams() covering: - Default values, limit-only, from:limit format - Large limits (200, 500, 1000) accepted without capping - Whitespace handling, negative values, invalid formats Co-Authored-By: Claude Opus 4.6 --- internal/query/issue_test.go | 116 +++++ pkg/jira/search_test.go | 546 ++++++++++++++++++++++++ pkg/jira/testdata/search_empty.json | 5 + pkg/jira/testdata/search_page1.json | 38 ++ pkg/jira/testdata/search_page2.json | 22 + pkg/jira/testdata/search_page2_mid.json | 22 + pkg/jira/testdata/search_page3.json | 22 + 7 files changed, 771 insertions(+) create mode 100644 pkg/jira/testdata/search_empty.json create mode 100644 pkg/jira/testdata/search_page1.json create mode 100644 pkg/jira/testdata/search_page2.json create mode 100644 
pkg/jira/testdata/search_page2_mid.json create mode 100644 pkg/jira/testdata/search_page3.json diff --git a/internal/query/issue_test.go b/internal/query/issue_test.go index 4c611d76..01fc46d6 100644 --- a/internal/query/issue_test.go +++ b/internal/query/issue_test.go @@ -447,3 +447,119 @@ func TestIssueGet(t *testing.T) { }) } } + +func TestGetPaginateParams(t *testing.T) { + t.Parallel() + + cases := []struct { + name string + input string + expectedFrom uint + expectedLimit uint + expectError bool + }{ + { + name: "empty string returns defaults", + input: "", + expectedFrom: 0, + expectedLimit: defaultLimit, + }, + { + name: "limit only", + input: "50", + expectedFrom: 0, + expectedLimit: 50, + }, + { + name: "from and limit", + input: "10:50", + expectedFrom: 10, + expectedLimit: 50, + }, + { + // Verifies that limits larger than 100 are accepted. SearchAll handles + // pagination internally, so the query layer should not cap the limit. + name: "limit 200 accepted", + input: "200", + expectedFrom: 0, + expectedLimit: 200, + }, + { + name: "limit 500 accepted", + input: "500", + expectedFrom: 0, + expectedLimit: 500, + }, + { + name: "limit 1000 accepted", + input: "1000", + expectedFrom: 0, + expectedLimit: 1000, + }, + { + name: "from 0 and limit 100", + input: "0:100", + expectedFrom: 0, + expectedLimit: 100, + }, + { + // Whitespace should be trimmed before parsing. 
+ name: "whitespace trimmed", + input: " 50 ", + expectedFrom: 0, + expectedLimit: 50, + }, + { + name: "non-numeric input", + input: "abc", + expectError: true, + }, + { + name: "negative limit", + input: "-1", + expectError: true, + }, + { + name: "zero limit", + input: "0", + expectError: true, + }, + { + name: "negative from", + input: "-1:50", + expectError: true, + }, + { + name: "invalid format with multiple colons", + input: "1:2:3", + expectError: true, + }, + { + name: "non-numeric from", + input: "abc:50", + expectError: true, + }, + { + name: "non-numeric limit in pair", + input: "10:abc", + expectError: true, + }, + } + + for _, tc := range cases { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + from, limit, err := getPaginateParams(tc.input) + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expectedFrom, from) + assert.Equal(t, tc.expectedLimit, limit) + } + }) + } +} diff --git a/pkg/jira/search_test.go b/pkg/jira/search_test.go index fb4736e0..8aca5bb3 100644 --- a/pkg/jira/search_test.go +++ b/pkg/jira/search_test.go @@ -2,10 +2,13 @@ package jira import ( + "encoding/json" + "fmt" "net/http" "net/http/httptest" "net/url" "os" + "sync/atomic" "testing" "time" @@ -140,3 +143,546 @@ func TestSearch(t *testing.T) { _, err = client.SearchV2("project=TEST", 0, 100) assert.Error(t, &ErrUnexpectedResponse{}, err) } + +func TestSearchAllSinglePage(t *testing.T) { + // Verifies that SearchAll returns all issues when the first (and only) response + // has isLast=true. This is the simplest pagination case: no follow-up requests needed. 
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + qs := r.URL.Query() + assert.Equal(t, "project=TEST ORDER BY created DESC", qs.Get("jql")) + assert.Equal(t, "*all", qs.Get("fields")) + // With totalLimit=100, maxResults should be 100 (capped at maxPageSize). + assert.Equal(t, "100", qs.Get("maxResults")) + // First request should NOT have a nextPageToken parameter. + assert.Empty(t, qs.Get("nextPageToken")) + + resp, err := os.ReadFile("./testdata/search.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 100) + assert.NoError(t, err) + + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 3) + assert.Equal(t, "TEST-1", actual.Issues[0].Key) + assert.Equal(t, "TEST-2", actual.Issues[1].Key) + assert.Equal(t, "TEST-3", actual.Issues[2].Key) +} + +func TestSearchAllTwoPages(t *testing.T) { + // Verifies cursor-based pagination across two pages: the first response returns + // isLast=false with a nextPageToken, and the second returns isLast=true. + // Issues from both pages must be combined in order without duplicates. + var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + page := atomic.AddInt32(&requestCount, 1) + qs := r.URL.Query() + + var fixture string + switch page { + case 1: + // First page: no nextPageToken in request. + assert.Empty(t, qs.Get("nextPageToken")) + fixture = "./testdata/search_page1.json" + case 2: + // Second page: must include the nextPageToken from page 1. 
+ assert.Equal(t, "token-page-2", qs.Get("nextPageToken")) + fixture = "./testdata/search_page2.json" + default: + t.Fatalf("unexpected request count: %d", page) + } + + resp, err := os.ReadFile(fixture) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 100) + assert.NoError(t, err) + + // Should have made exactly 2 requests. + assert.Equal(t, int32(2), atomic.LoadInt32(&requestCount)) + + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 3) + // Issues should be combined from both pages in order. + assert.Equal(t, "TEST-1", actual.Issues[0].Key) + assert.Equal(t, "TEST-2", actual.Issues[1].Key) + assert.Equal(t, "TEST-3", actual.Issues[2].Key) +} + +func TestSearchAllThreePages(t *testing.T) { + // Verifies pagination across three pages. The chain is: + // page1 (isLast=false, token="token-page-2") -> + // page2 (isLast=false, token="token-page-3") -> + // page3 (isLast=true). + // All issues from all three pages must be accumulated correctly. 
+ var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + page := atomic.AddInt32(&requestCount, 1) + qs := r.URL.Query() + + var fixture string + switch page { + case 1: + assert.Empty(t, qs.Get("nextPageToken")) + fixture = "./testdata/search_page1.json" + case 2: + assert.Equal(t, "token-page-2", qs.Get("nextPageToken")) + fixture = "./testdata/search_page2_mid.json" + case 3: + assert.Equal(t, "token-page-3", qs.Get("nextPageToken")) + fixture = "./testdata/search_page3.json" + default: + t.Fatalf("unexpected request count: %d", page) + } + + resp, err := os.ReadFile(fixture) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 500) + assert.NoError(t, err) + + assert.Equal(t, int32(3), atomic.LoadInt32(&requestCount)) + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 4) + assert.Equal(t, "TEST-1", actual.Issues[0].Key) + assert.Equal(t, "TEST-2", actual.Issues[1].Key) + assert.Equal(t, "TEST-3", actual.Issues[2].Key) + assert.Equal(t, "TEST-4", actual.Issues[3].Key) +} + +func TestSearchAllLimitEnforcement(t *testing.T) { + // Verifies that SearchAll stops fetching additional pages once the totalLimit + // is reached. With a limit of 2 and 2 issues on the first page, the second + // page should not be fetched even though isLast is false. 
+ var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + page := atomic.AddInt32(&requestCount, 1) + + var fixture string + switch page { + case 1: + fixture = "./testdata/search_page1.json" + default: + // SearchAll should stop after the first page because we already have 2 issues == limit. + t.Fatalf("should not have fetched page %d; limit was already reached", page) + return + } + + resp, err := os.ReadFile(fixture) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 2) + assert.NoError(t, err) + + assert.Equal(t, int32(1), atomic.LoadInt32(&requestCount)) + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 2) + assert.Equal(t, "TEST-1", actual.Issues[0].Key) + assert.Equal(t, "TEST-2", actual.Issues[1].Key) +} + +func TestSearchAllLimitTrimsOvershoot(t *testing.T) { + // Verifies that if a page returns more issues than needed to reach totalLimit, + // the result is trimmed to exactly totalLimit. Here we request limit=1 but the + // first page returns 2 issues; the result must be trimmed to 1. + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + qs := r.URL.Query() + // With totalLimit=1, maxResults should be 1 (less than maxPageSize). + assert.Equal(t, "1", qs.Get("maxResults")) + + // Return 2 issues even though maxResults=1 was requested. The API might + // not always honor maxResults exactly, so SearchAll must trim to the limit. 
+ resp, err := os.ReadFile("./testdata/search_page1.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 1) + assert.NoError(t, err) + + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 1) + assert.Equal(t, "TEST-1", actual.Issues[0].Key) +} + +func TestSearchAllLimitLessThanPageSize(t *testing.T) { + // Verifies that when totalLimit < maxPageSize (100), the maxResults parameter + // is set to totalLimit rather than the full page size. This avoids fetching + // more data than needed from the API. + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + qs := r.URL.Query() + assert.Equal(t, "50", qs.Get("maxResults")) + + resp, err := os.ReadFile("./testdata/search.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 50) + assert.NoError(t, err) + + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 3) +} + +func TestSearchAllLimitExceedsPageSize(t *testing.T) { + // Verifies that when totalLimit > maxPageSize (100), the maxResults parameter + // is capped at 100 per request, and pagination is used to collect up to totalLimit. + var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + page := atomic.AddInt32(&requestCount, 1) + qs := r.URL.Query() + + // maxResults should be capped at 100 for the first request. 
+ assert.Equal(t, "100", qs.Get("maxResults")) + + var fixture string + switch page { + case 1: + fixture = "./testdata/search_page1.json" + case 2: + fixture = "./testdata/search_page2.json" + default: + t.Fatalf("unexpected request count: %d", page) + } + + resp, err := os.ReadFile(fixture) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 200) + assert.NoError(t, err) + + assert.Equal(t, int32(2), atomic.LoadInt32(&requestCount)) + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 3) +} + +func TestSearchAllEmptyResults(t *testing.T) { + // Verifies that SearchAll handles the case where the JQL matches no issues. + // The API returns isLast=true with an empty issues array on the first request. + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + resp, err := os.ReadFile("./testdata/search_empty.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=EMPTY ORDER BY created DESC", 100) + assert.NoError(t, err) + + assert.True(t, actual.IsLast) + assert.Empty(t, actual.Issues) +} + +func TestSearchAllEmptyTokenStopsPagination(t *testing.T) { + // Defensive check: if the API returns isLast=false but an empty nextPageToken, + // SearchAll must stop to avoid infinite loops. This guards against malformed + // API responses. 
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + // Return isLast=false but no nextPageToken — a contradictory response. + result := SearchResult{ + IsLast: false, + NextPageToken: "", + Issues: []*Issue{ + {Key: "TEST-1", Fields: IssueFields{Summary: "issue 1", Labels: []string{}}}, + }, + } + resp, err := json.Marshal(result) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 100) + assert.NoError(t, err) + + // Should return what we got rather than looping forever. + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 1) + assert.Equal(t, "TEST-1", actual.Issues[0].Key) +} + +func TestSearchAllSingleIssue(t *testing.T) { + // Verifies SearchAll works correctly when there is exactly one issue total. 
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + result := SearchResult{ + IsLast: true, + Issues: []*Issue{ + {Key: "TEST-99", Fields: IssueFields{Summary: "only issue", Labels: []string{}}}, + }, + } + resp, err := json.Marshal(result) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 100) + assert.NoError(t, err) + + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 1) + assert.Equal(t, "TEST-99", actual.Issues[0].Key) +} + +func TestSearchAllErrorOnFirstPage(t *testing.T) { + // Verifies that if the API returns an error on the first request, + // SearchAll propagates the error immediately with no partial results. + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(400) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 100) + assert.Error(t, err) + assert.Nil(t, actual) +} + +func TestSearchAllErrorMidPagination(t *testing.T) { + // Verifies that if the first page succeeds but the second page returns an error, + // SearchAll returns an error. The implementation does not return partial results; + // it propagates the error from the failing page. 
+ var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + page := atomic.AddInt32(&requestCount, 1) + + switch page { + case 1: + resp, err := os.ReadFile("./testdata/search_page1.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + case 2: + // Second page returns an HTTP error. + w.WriteHeader(500) + default: + t.Fatalf("unexpected request count: %d", page) + } + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 200) + assert.Error(t, err) + assert.Nil(t, actual) + // Verify both requests were actually made. + assert.Equal(t, int32(2), atomic.LoadInt32(&requestCount)) +} + +func TestSearchAllInvalidJSON(t *testing.T) { + // Verifies that a malformed JSON response is surfaced as an error. + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write([]byte(`{"isLast": true, "issues": [`)) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 100) + assert.Error(t, err) + assert.Nil(t, actual) +} + +func TestSearchAllPageSizeAdjustment(t *testing.T) { + // Verifies that SearchAll adjusts the maxResults for the last page when the + // remaining count is less than the page size. With totalLimit=3 and 2 issues + // on page 1, the second page request should have maxResults=1. 
+ var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + page := atomic.AddInt32(&requestCount, 1) + qs := r.URL.Query() + + switch page { + case 1: + assert.Equal(t, "3", qs.Get("maxResults")) + + resp, err := os.ReadFile("./testdata/search_page1.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + case 2: + // Remaining = 3 - 2 = 1, so maxResults should be adjusted to 1. + assert.Equal(t, "1", qs.Get("maxResults")) + + resp, err := os.ReadFile("./testdata/search_page2.json") + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + default: + t.Fatalf("unexpected request count: %d", page) + } + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 3) + assert.NoError(t, err) + + assert.Equal(t, int32(2), atomic.LoadInt32(&requestCount)) + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 3) +} + +func TestSearchAllPassesNextPageToken(t *testing.T) { + // Verifies that SearchAll correctly URL-encodes and passes the nextPageToken + // from each response to the subsequent request. + var requestCount int32 + // Use tokens that require URL encoding to verify proper handling. + tokens := []string{"", "abc+def/123=", "xyz+uvw/456="} + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + page := atomic.AddInt32(&requestCount, 1) + qs := r.URL.Query() + + // Verify the token sent in this request matches what we expect. 
+ expectedToken := tokens[page-1] + assert.Equal(t, expectedToken, qs.Get("nextPageToken"), + fmt.Sprintf("page %d: unexpected nextPageToken", page)) + + var result SearchResult + switch page { + case 1: + result = SearchResult{ + IsLast: false, + NextPageToken: tokens[1], + Issues: []*Issue{{Key: "TEST-1", Fields: IssueFields{Labels: []string{}}}}, + } + case 2: + result = SearchResult{ + IsLast: false, + NextPageToken: tokens[2], + Issues: []*Issue{{Key: "TEST-2", Fields: IssueFields{Labels: []string{}}}}, + } + case 3: + result = SearchResult{ + IsLast: true, + Issues: []*Issue{{Key: "TEST-3", Fields: IssueFields{Labels: []string{}}}}, + } + default: + t.Fatalf("unexpected request count: %d", page) + } + + resp, err := json.Marshal(result) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST", 500) + assert.NoError(t, err) + + assert.Equal(t, int32(3), atomic.LoadInt32(&requestCount)) + assert.Len(t, actual.Issues, 3) +} diff --git a/pkg/jira/testdata/search_empty.json b/pkg/jira/testdata/search_empty.json new file mode 100644 index 00000000..36bc4eb1 --- /dev/null +++ b/pkg/jira/testdata/search_empty.json @@ -0,0 +1,5 @@ +{ + "isLast": true, + "nextPageToken": "", + "issues": [] +} diff --git a/pkg/jira/testdata/search_page1.json b/pkg/jira/testdata/search_page1.json new file mode 100644 index 00000000..61d52bab --- /dev/null +++ b/pkg/jira/testdata/search_page1.json @@ -0,0 +1,38 @@ +{ + "isLast": false, + "nextPageToken": "token-page-2", + "issues": [ + { + "key": "TEST-1", + "fields": { + "issuetype": { "name": "Bug" }, + "resolution": null, + "created": "2020-12-03T14:05:20.974+0100", + "priority": { "name": "Medium" }, + "labels": [], + "assignee": null, + "updated": "2020-12-03T14:05:20.974+0100", + "status": { "name": "To Do" }, 
+ "summary": "Bug summary", + "reporter": { "displayName": "Person A" }, + "watches": { "watchCount": 1, "isWatching": true } + } + }, + { + "key": "TEST-2", + "fields": { + "issuetype": { "name": "Story" }, + "resolution": null, + "created": "2020-12-03T14:05:20.974+0100", + "priority": { "name": "High" }, + "labels": ["critical", "feature"], + "assignee": null, + "updated": "2020-12-03T14:05:20.974+0100", + "status": { "name": "In Progress" }, + "summary": "Story summary", + "reporter": { "displayName": "Person B" }, + "watches": { "watchCount": 12, "isWatching": true } + } + } + ] +} diff --git a/pkg/jira/testdata/search_page2.json b/pkg/jira/testdata/search_page2.json new file mode 100644 index 00000000..c29bd8fa --- /dev/null +++ b/pkg/jira/testdata/search_page2.json @@ -0,0 +1,22 @@ +{ + "isLast": true, + "nextPageToken": "", + "issues": [ + { + "key": "TEST-3", + "fields": { + "issuetype": { "name": "Task" }, + "resolution": { "name": "Done" }, + "created": "2020-12-03T14:05:20.974+0100", + "priority": { "name": "Low" }, + "labels": [], + "assignee": { "displayName": "Person A" }, + "updated": "2020-12-03T14:05:20.974+0100", + "status": { "name": "Done" }, + "summary": "Task summary", + "reporter": { "displayName": "Person C" }, + "watches": { "watchCount": 3, "isWatching": false } + } + } + ] +} diff --git a/pkg/jira/testdata/search_page2_mid.json b/pkg/jira/testdata/search_page2_mid.json new file mode 100644 index 00000000..c67d620c --- /dev/null +++ b/pkg/jira/testdata/search_page2_mid.json @@ -0,0 +1,22 @@ +{ + "isLast": false, + "nextPageToken": "token-page-3", + "issues": [ + { + "key": "TEST-3", + "fields": { + "issuetype": { "name": "Task" }, + "resolution": { "name": "Done" }, + "created": "2020-12-03T14:05:20.974+0100", + "priority": { "name": "Low" }, + "labels": [], + "assignee": { "displayName": "Person A" }, + "updated": "2020-12-03T14:05:20.974+0100", + "status": { "name": "Done" }, + "summary": "Task summary", + "reporter": { "displayName": 
"Person C" }, + "watches": { "watchCount": 3, "isWatching": false } + } + } + ] +} diff --git a/pkg/jira/testdata/search_page3.json b/pkg/jira/testdata/search_page3.json new file mode 100644 index 00000000..e61fefd7 --- /dev/null +++ b/pkg/jira/testdata/search_page3.json @@ -0,0 +1,22 @@ +{ + "isLast": true, + "nextPageToken": "", + "issues": [ + { + "key": "TEST-4", + "fields": { + "issuetype": { "name": "Epic" }, + "resolution": null, + "created": "2020-12-03T14:05:20.974+0100", + "priority": { "name": "High" }, + "labels": ["roadmap"], + "assignee": { "displayName": "Person B" }, + "updated": "2020-12-03T14:05:20.974+0100", + "status": { "name": "In Progress" }, + "summary": "Epic summary", + "reporter": { "displayName": "Person A" }, + "watches": { "watchCount": 5, "isWatching": true } + } + } + ] +} From 07cf5ba8e903d6dbc8490aad32670f151f82ab06 Mon Sep 17 00:00:00 2001 From: "Charles Le (cle-mbp-m3)" Date: Wed, 11 Mar 2026 19:23:45 -0700 Subject: [PATCH 3/4] Address Copilot review: fix totalLimit==0 edge case and error message wording - Handle totalLimit==0 in SearchAll() by defaulting pageSize to maxPageSize and guarding the remaining-count arithmetic to prevent uint underflow - Update errOutOfBounds message to clarify is non-negative, is positive (matching actual validation logic) - Add test for SearchAll with totalLimit=0 Co-Authored-By: Claude Opus 4.6 --- internal/query/issue.go | 2 +- pkg/jira/search.go | 10 +++++---- pkg/jira/search_test.go | 49 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 5 deletions(-) diff --git a/internal/query/issue.go b/internal/query/issue.go index 01a9f6f6..ef76b475 100644 --- a/internal/query/issue.go +++ b/internal/query/issue.go @@ -357,7 +357,7 @@ func getPaginateParams(paginate string) (uint, uint, error) { } errOutOfBounds := fmt.Errorf( - "invalid argument for paginate: and must be positive integers", + "invalid argument for paginate: must be a non-negative integer and must be a positive 
integer", ) if from < 0 || limit <= 0 { diff --git a/pkg/jira/search.go b/pkg/jira/search.go index a0ef22f3..864a11c0 100644 --- a/pkg/jira/search.go +++ b/pkg/jira/search.go @@ -32,7 +32,7 @@ func (c *Client) SearchAll(jql string, totalLimit uint) (*SearchResult, error) { var allIssues []*Issue pageSize := totalLimit - if pageSize > maxPageSize { + if pageSize == 0 || pageSize > maxPageSize { pageSize = maxPageSize } @@ -60,9 +60,11 @@ func (c *Client) SearchAll(jql string, totalLimit uint) (*SearchResult, error) { nextPageToken = result.NextPageToken // Adjust page size for the last page if needed. - remaining := totalLimit - uint(len(allIssues)) - if remaining < pageSize { - pageSize = remaining + if totalLimit > 0 { + remaining := totalLimit - uint(len(allIssues)) + if remaining < pageSize { + pageSize = remaining + } } } diff --git a/pkg/jira/search_test.go b/pkg/jira/search_test.go index 8aca5bb3..fe4eed02 100644 --- a/pkg/jira/search_test.go +++ b/pkg/jira/search_test.go @@ -630,6 +630,55 @@ func TestSearchAllPageSizeAdjustment(t *testing.T) { assert.Len(t, actual.Issues, 3) } +func TestSearchAllZeroLimit(t *testing.T) { + // Verifies that SearchAll handles totalLimit=0 gracefully by treating it as + // "fetch all available results". The pageSize should default to maxPageSize (100) + // and pagination should continue until isLast=true. + var requestCount int32 + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "/rest/api/3/search/jql", r.URL.Path) + + page := atomic.AddInt32(&requestCount, 1) + qs := r.URL.Query() + + // With totalLimit=0, maxResults should default to maxPageSize (100). 
+ assert.Equal(t, "100", qs.Get("maxResults")) + + var fixture string + switch page { + case 1: + assert.Empty(t, qs.Get("nextPageToken")) + fixture = "./testdata/search_page1.json" + case 2: + assert.Equal(t, "token-page-2", qs.Get("nextPageToken")) + fixture = "./testdata/search_page2.json" + default: + t.Fatalf("unexpected request count: %d", page) + } + + resp, err := os.ReadFile(fixture) + assert.NoError(t, err) + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + _, _ = w.Write(resp) + })) + defer server.Close() + + client := NewClient(Config{Server: server.URL}, WithTimeout(3*time.Second)) + + actual, err := client.SearchAll("project=TEST ORDER BY created DESC", 0) + assert.NoError(t, err) + + assert.Equal(t, int32(2), atomic.LoadInt32(&requestCount)) + assert.True(t, actual.IsLast) + assert.Len(t, actual.Issues, 3) + assert.Equal(t, "TEST-1", actual.Issues[0].Key) + assert.Equal(t, "TEST-2", actual.Issues[1].Key) + assert.Equal(t, "TEST-3", actual.Issues[2].Key) +} + func TestSearchAllPassesNextPageToken(t *testing.T) { // Verifies that SearchAll correctly URL-encodes and passes the nextPageToken // from each response to the subsequent request. From 376544abd491bf96b2a00a95fe234d400ff49933 Mon Sep 17 00:00:00 2001 From: cle-mac-studio Date: Tue, 24 Mar 2026 02:26:51 -0700 Subject: [PATCH 4/4] test: rename unused http.Request params to _ in search_test handlers Fixes DeepSource RVV-B0012 (unused parameter) in three test handler closures where only the http.ResponseWriter is needed. 
--- pkg/jira/search_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/jira/search_test.go b/pkg/jira/search_test.go index fe4eed02..d4d0d454 100644 --- a/pkg/jira/search_test.go +++ b/pkg/jira/search_test.go @@ -519,7 +519,7 @@ func TestSearchAllSingleIssue(t *testing.T) { func TestSearchAllErrorOnFirstPage(t *testing.T) { // Verifies that if the API returns an error on the first request, // SearchAll propagates the error immediately with no partial results. - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(400) })) defer server.Close() @@ -537,7 +537,7 @@ func TestSearchAllErrorMidPagination(t *testing.T) { // it propagates the error from the failing page. var requestCount int32 - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { page := atomic.AddInt32(&requestCount, 1) switch page { @@ -568,7 +568,7 @@ func TestSearchAllErrorMidPagination(t *testing.T) { func TestSearchAllInvalidJSON(t *testing.T) { // Verifies that a malformed JSON response is surfaced as an error. - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(200) _, _ = w.Write([]byte(`{"isLast": true, "issues": [`))