Skip to content

Commit d048caf

Browse files
feat: add filter and pagination for the jobs handler
1 parent 866b853 commit d048caf

1 file changed

Lines changed: 99 additions & 2 deletions

File tree

main.go

Lines changed: 99 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ import (
1414
"os"
1515
"os/signal"
1616
"slices"
17+
"strconv"
1718
"strings"
1819
"syscall"
1920
"time"
@@ -110,6 +111,65 @@ func newJobsHandler(jobs *safeMap[string, RequestState]) http.Handler {
110111
CompletedAt time.Time `json:"completed_at,omitzero"`
111112
}
112113

114+
allowedFilters := []string{
115+
string(execStateRunning),
116+
string(execStateQueued),
117+
string(execStateCompleted),
118+
string(execStateFailed),
119+
string(execStateCanceled),
120+
}
121+
122+
validateFilters := func(filters []string) bool {
123+
if len(filters) == 0 {
124+
return true
125+
}
126+
127+
for _, f := range filters {
128+
if !slices.Contains(allowedFilters, strings.ToLower(f)) {
129+
return false
130+
}
131+
}
132+
133+
return true
134+
}
135+
136+
paginate := func(w http.ResponseWriter, r *http.Request, summary []JobsSummary, cursor string, limit int) {
137+
start := slices.IndexFunc(summary, func(e JobsSummary) bool {
138+
return cursor == "" || e.ID == cursor // no cursor means serve first page
139+
})
140+
141+
if start == -1 {
142+
http.Error(w, "cursor does not exists", http.StatusBadRequest)
143+
144+
return
145+
}
146+
147+
end := min(start+limit, len(summary))
148+
149+
page := summary[start:end]
150+
151+
if end < len(summary) {
152+
nextCursor := summary[end].ID
153+
154+
u := *r.URL
155+
q := u.Query()
156+
q.Set("cursor", nextCursor)
157+
q.Set("limit", strconv.Itoa(limit))
158+
u.RawQuery = q.Encode()
159+
160+
scheme := "http"
161+
if r.TLS != nil {
162+
scheme = "https"
163+
}
164+
165+
u.Scheme, u.Host = scheme, r.Host
166+
167+
w.Header().Set("Link", fmt.Sprintf("<%s>; rel=\"next\"", u.String()))
168+
}
169+
170+
writeJSON(w, http.StatusOK, page)
171+
}
172+
113173
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
114174
if r.Method != http.MethodGet {
115175
w.Header().Set("Allow", http.MethodGet)
@@ -119,8 +179,27 @@ func newJobsHandler(jobs *safeMap[string, RequestState]) http.Handler {
119179
}
120180

121181
summary := make([]JobsSummary, 0, jobs.len())
182+
filters := make([]string, 0, len(allowedFilters))
183+
184+
for _, s := range r.URL.Query()["filter"] {
185+
filters = append(filters, strings.Split(s, ",")...)
186+
}
187+
188+
if !validateFilters(filters) {
189+
http.Error(
190+
w,
191+
"allowed filters: "+strings.Join(allowedFilters, ","),
192+
http.StatusBadRequest,
193+
)
194+
195+
return
196+
}
122197

123198
jobs.safeRange(func(k string, v RequestState) {
199+
if len(filters) > 0 && !slices.Contains(filters, string(v.State)) {
200+
return
201+
}
202+
124203
summary = append(summary, JobsSummary{
125204
ID: k,
126205
Path: v.Path,
@@ -135,10 +214,28 @@ func newJobsHandler(jobs *safeMap[string, RequestState]) http.Handler {
135214
})
136215

137216
slices.SortFunc(summary, func(a, b JobsSummary) int {
138-
return a.StartedAt.Compare(b.StartedAt)
217+
return b.StartedAt.Compare(a.StartedAt) // descending: newest jobs first
139218
})
140219

141-
writeJSON(w, http.StatusOK, summary)
220+
var (
221+
limit = r.URL.Query().Get("limit")
222+
cursor = r.URL.Query().Get("cursor")
223+
)
224+
225+
if limit == "" {
226+
writeJSON(w, http.StatusOK, summary)
227+
228+
return
229+
}
230+
231+
l, err := strconv.Atoi(limit)
232+
if err != nil {
233+
http.Error(w, fmt.Sprintf("invalid limit: %s", err), http.StatusBadRequest)
234+
235+
return
236+
}
237+
238+
paginate(w, r, summary, cursor, l)
142239
})
143240
}
144241

0 commit comments

Comments
 (0)