Automatically pause queue if index service is unavailable (#15066)
* Handle keyword search error when the issue indexer service is not available
* Implement automatic pausing and resuming of the code indexer queue
parent 2649eddcf0
commit 8038610a42
28 changed files with 522 additions and 151 deletions
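Only a few of the 28 changed files are excerpted below, and none of them show the queue changes themselves. As a rough illustration of the pause/resume pattern the title describes — a hypothetical sketch, not the actual modules/queue API added in this PR — a pausable queue gate can look like this:

```go
// Illustrative sketch only: a pausable gate in the spirit of this change.
// The real queue types live in modules/queue; every name here is hypothetical.
package main

import (
	"fmt"
	"sync"
)

// PausableQueue gates delivery of work items behind a pause flag so that a
// failing backend (e.g. an unreachable indexer) can be waited out instead of
// erroring on every item.
type PausableQueue struct {
	mu      sync.Mutex
	paused  bool
	resumed chan struct{} // closed while running; replaced when paused
}

func NewPausableQueue() *PausableQueue {
	q := &PausableQueue{resumed: make(chan struct{})}
	close(q.resumed) // start in the running state
	return q
}

// Pause stops handlers from picking up new items.
func (q *PausableQueue) Pause() {
	q.mu.Lock()
	defer q.mu.Unlock()
	if !q.paused {
		q.paused = true
		q.resumed = make(chan struct{})
	}
}

// Resume lets blocked handlers continue.
func (q *PausableQueue) Resume() {
	q.mu.Lock()
	defer q.mu.Unlock()
	if q.paused {
		q.paused = false
		close(q.resumed)
	}
}

// WaitIfPaused blocks until the queue is running again; receiving from a
// closed channel returns immediately, so a running queue never blocks here.
func (q *PausableQueue) WaitIfPaused() {
	q.mu.Lock()
	ch := q.resumed
	q.mu.Unlock()
	<-ch
}

func main() {
	q := NewPausableQueue()
	q.Pause()
	go q.Resume()
	q.WaitIfPaused()
	fmt.Println("queue running")
}
```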
@@ -65,6 +65,7 @@ type Engine interface {
 	Query(...interface{}) ([]map[string][]byte, error)
 	Cols(...string) *xorm.Session
 	Context(ctx context.Context) *xorm.Session
+	Ping() error
 }

 // TableInfo returns table's information via an object
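The hunk above adds xorm's Ping method to the Engine interface, giving higher layers a cheap way to check that the database is reachable. Below is a minimal, self-contained sketch of how such a check might be consumed; the Pinger, fakeEngine, and healthy names are illustrative assumptions, not code from this PR:

```go
package main

import (
	"errors"
	"fmt"
)

// Pinger is the slice of the Engine interface this example needs; Gitea's
// Engine interface now includes Ping() error as shown in the hunk above.
type Pinger interface {
	Ping() error
}

// fakeEngine is a stand-in for a real database engine.
type fakeEngine struct{ down bool }

func (f fakeEngine) Ping() error {
	if f.down {
		return errors.New("connection refused")
	}
	return nil
}

// healthy reports whether the backing store answers a ping; a caller might
// use this to decide whether the database fallback is worth attempting.
func healthy(p Pinger) bool {
	return p.Ping() == nil
}

func main() {
	fmt.Println(healthy(fakeEngine{down: false})) // true
	fmt.Println(healthy(fakeEngine{down: true}))  // false
}
```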
@@ -1859,7 +1859,7 @@ func GetRepoIssueStats(repoID, uid int64, filterMode int, isPull bool) (numOpen,
 }

 // SearchIssueIDsByKeyword search issues on database
-func SearchIssueIDsByKeyword(kw string, repoIDs []int64, limit, start int) (int64, []int64, error) {
+func SearchIssueIDsByKeyword(ctx context.Context, kw string, repoIDs []int64, limit, start int) (int64, []int64, error) {
 	repoCond := builder.In("repo_id", repoIDs)
 	subQuery := builder.Select("id").From("issue").Where(repoCond)
 	kw = strings.ToUpper(kw)
@@ -1884,7 +1884,7 @@ func SearchIssueIDsByKeyword(kw string, repoIDs []int64, limit, start int) (int6
 		ID int64
 		UpdatedUnix int64
 	}, 0, limit)
-	err := db.GetEngine(db.DefaultContext).Distinct("id", "updated_unix").Table("issue").Where(cond).
+	err := db.GetEngine(ctx).Distinct("id", "updated_unix").Table("issue").Where(cond).
 		OrderBy("`updated_unix` DESC").Limit(limit, start).
 		Find(&res)
 	if err != nil {
@@ -1894,7 +1894,7 @@ func SearchIssueIDsByKeyword(kw string, repoIDs []int64, limit, start int) (int6
 		ids = append(ids, r.ID)
 	}

-	total, err := db.GetEngine(db.DefaultContext).Distinct("id").Table("issue").Where(cond).Count()
+	total, err := db.GetEngine(ctx).Distinct("id").Table("issue").Where(cond).Count()
 	if err != nil {
 		return 0, nil, err
 	}
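The three hunks above thread a caller-supplied context.Context through SearchIssueIDsByKeyword instead of pinning its xorm sessions to db.DefaultContext. This function is the database fallback the commit message refers to: when the issue indexer errors out, keyword search can still be answered from the database. A minimal sketch of that fallback shape, assuming stand-in search functions rather than Gitea's real indexer and model code:

```go
// Sketch of the fallback described in the commit message: try the indexer
// first, and fall back to the database search when it errors. Both search
// functions here are hypothetical stand-ins.
package main

import (
	"context"
	"errors"
	"fmt"
)

var errIndexerUnavailable = errors.New("issue indexer unavailable")

type searchFunc func(ctx context.Context, kw string, repoIDs []int64, limit, start int) (int64, []int64, error)

// indexerSearch simulates an indexer that is currently down.
func indexerSearch(ctx context.Context, kw string, repoIDs []int64, limit, start int) (int64, []int64, error) {
	return 0, nil, errIndexerUnavailable
}

// dbSearch stands in for the context-aware SearchIssueIDsByKeyword above.
func dbSearch(ctx context.Context, kw string, repoIDs []int64, limit, start int) (int64, []int64, error) {
	return 1, []int64{2}, nil
}

// searchWithFallback routes the query to the fallback when the primary
// backend fails, passing the same context to both.
func searchWithFallback(ctx context.Context, primary, fallback searchFunc, kw string, repoIDs []int64, limit, start int) (int64, []int64, error) {
	total, ids, err := primary(ctx, kw, repoIDs, limit, start)
	if err != nil {
		return fallback(ctx, kw, repoIDs, limit, start)
	}
	return total, ids, nil
}

func main() {
	total, ids, err := searchWithFallback(context.Background(), indexerSearch, dbSearch, "bug", []int64{1}, 10, 0)
	fmt.Println(total, ids, err) // 1 [2] <nil>
}
```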
@@ -5,6 +5,7 @@
 package models

 import (
+	"context"
 	"fmt"
 	"sort"
 	"sync"
@@ -303,23 +304,23 @@ func TestIssue_loadTotalTimes(t *testing.T) {

 func TestIssue_SearchIssueIDsByKeyword(t *testing.T) {
 	assert.NoError(t, unittest.PrepareTestDatabase())
-	total, ids, err := SearchIssueIDsByKeyword("issue2", []int64{1}, 10, 0)
+	total, ids, err := SearchIssueIDsByKeyword(context.TODO(), "issue2", []int64{1}, 10, 0)
 	assert.NoError(t, err)
 	assert.EqualValues(t, 1, total)
 	assert.EqualValues(t, []int64{2}, ids)

-	total, ids, err = SearchIssueIDsByKeyword("first", []int64{1}, 10, 0)
+	total, ids, err = SearchIssueIDsByKeyword(context.TODO(), "first", []int64{1}, 10, 0)
 	assert.NoError(t, err)
 	assert.EqualValues(t, 1, total)
 	assert.EqualValues(t, []int64{1}, ids)

-	total, ids, err = SearchIssueIDsByKeyword("for", []int64{1}, 10, 0)
+	total, ids, err = SearchIssueIDsByKeyword(context.TODO(), "for", []int64{1}, 10, 0)
 	assert.NoError(t, err)
 	assert.EqualValues(t, 5, total)
 	assert.ElementsMatch(t, []int64{1, 2, 3, 5, 11}, ids)

 	// issue1's comment id 2
-	total, ids, err = SearchIssueIDsByKeyword("good", []int64{1}, 10, 0)
+	total, ids, err = SearchIssueIDsByKeyword(context.TODO(), "good", []int64{1}, 10, 0)
 	assert.NoError(t, err)
 	assert.EqualValues(t, 1, total)
 	assert.EqualValues(t, []int64{1}, ids)
@@ -464,7 +465,7 @@ func TestCorrectIssueStats(t *testing.T) {
 	wg.Wait()

 	// Now we will get all issueID's that match the "Bugs are nasty" query.
-	total, ids, err := SearchIssueIDsByKeyword("Bugs are nasty", []int64{1}, issueAmount, 0)
+	total, ids, err := SearchIssueIDsByKeyword(context.TODO(), "Bugs are nasty", []int64{1}, issueAmount, 0)

 	// Just to be sure.
 	assert.NoError(t, err)