Add more linters to improve code readability (#19989)

Add nakedret, unconvert, wastedassign, stylecheck and nolintlint linters to improve code readability. A short sketch of the patterns these linters flag follows the list below.

- nakedret - https://github.com/alexkohler/nakedret - nakedret is a Go static analysis tool to find naked returns in functions greater than a specified function length.
- unconvert - https://github.com/mdempsky/unconvert - Remove unnecessary type conversions
- wastedassign - https://github.com/sanposhiho/wastedassign - wastedassign finds wasted assignment statements.
- nolintlint - Reports ill-formed or insufficient nolint directives
- stylecheck - https://staticcheck.io/docs/checks/#ST - keep style consistent
  - excluded: [ST1003 - Poorly chosen identifier](https://staticcheck.io/docs/checks/#ST1003) and [ST1005 - Incorrectly formatted error string](https://staticcheck.io/docs/checks/#ST1005)
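
The fixes in the diff below are largely mechanical: naked returns in longer functions become explicit returns (nakedret), pointless initial assignments become plain `var` declarations (wastedassign), and conversions of a value to its own type are dropped (unconvert). A minimal sketch of those three patterns, using made-up names rather than code from this repository:

```go
package main

import (
	"fmt"
	"strconv"
)

// Illustrative sketch only; parseSize and sumSizes are hypothetical helpers,
// not functions from the Gitea codebase.
func parseSize(s string) (int64, error) {
	return strconv.ParseInt(s, 10, 64)
}

func sumSizes(items []string) (sum int64, err error) {
	// wastedassign: writing "n := int64(0)" here would waste the initial
	// assignment, since n is overwritten on every loop iteration.
	var n int64
	for _, s := range items {
		n, err = parseSize(s)
		if err != nil {
			// nakedret: return the named results explicitly instead of
			// using a bare "return" deep inside a longer function.
			return sum, err
		}
		// unconvert: n is already an int64, so "sum += int64(n)" would be
		// flagged as an unnecessary conversion.
		sum += n
	}
	return sum, nil
}

func main() {
	total, err := sumSizes([]string{"1", "2", "3"})
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(total) // prints 6
}
```

stylecheck and nolintlint do not map onto a single before/after snippet: they mostly flag style issues such as inconsistent receiver names (likely the source of the `i` → `issue` renames below) and ill-formed `//nolint` directives.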
Authored by Wim on 2022-06-20 12:02:49 +02:00, committed by GitHub
parent 3289abcefc
commit cb50375e2b
147 changed files with 402 additions and 397 deletions

View file

@ -92,7 +92,7 @@ func NewClient(user *user_model.User, pubID string) (c *Client, err error) {
priv: privParsed,
pubID: pubID,
}
return
return c, err
}
// NewRequest function
@ -110,7 +110,7 @@ func (c *Client) NewRequest(b []byte, to string) (req *http.Request, err error)
return
}
err = signer.SignRequest(c.priv, c.pubID, req, b)
return
return req, err
}
// Post function
@ -120,5 +120,5 @@ func (c *Client) Post(b []byte, to string) (resp *http.Response, err error) {
return
}
resp, err = c.client.Do(req)
return
return resp, err
}

View file

@ -35,11 +35,11 @@ func GetKeyPair(user *user_model.User) (pub, priv string, err error) {
// GetPublicKey function returns a user's public key
func GetPublicKey(user *user_model.User) (pub string, err error) {
pub, _, err = GetKeyPair(user)
return
return pub, err
}
// GetPrivateKey function returns a user's private key
func GetPrivateKey(user *user_model.User) (priv string, err error) {
_, priv, err = GetKeyPair(user)
return
return priv, err
}

View file

@ -55,7 +55,7 @@ func isDecimal(r rune) bool {
}
func compareByNumbers(str1 string, pos1 int, str2 string, pos2 int) (i1, i2 int, less, equal bool) {
var d1, d2 bool = true, true
d1, d2 := true, true
var dec1, dec2 string
for d1 || d2 {
if d1 {

View file

@ -296,11 +296,11 @@ func TestDetectEncoding(t *testing.T) {
}
func stringMustStartWith(t *testing.T, expected, value string) {
assert.Equal(t, expected, string(value[:len(expected)]))
assert.Equal(t, expected, value[:len(expected)])
}
func stringMustEndWith(t *testing.T, expected, value string) {
assert.Equal(t, expected, string(value[len(value)-len(expected):]))
assert.Equal(t, expected, value[len(value)-len(expected):])
}
func bytesMustStartWith(t *testing.T, expected, value []byte) {

View file

@ -222,15 +222,15 @@ readingloop:
return
}
escaped.HasError = true
return
return escaped, err
}
func writeBroken(output io.Writer, bs []byte) (err error) {
_, err = fmt.Fprintf(output, `<span class="broken-code-point">&lt;%X&gt;</span>`, bs)
return
return err
}
func writeEscaped(output io.Writer, r rune) (err error) {
_, err = fmt.Fprintf(output, `<span class="escaped-code-point" data-escaped="[U+%04X]"><span class="char">%c</span></span>`, r, r)
return
return err
}

View file

@ -340,7 +340,7 @@ func ReferencesGitRepo(allowEmpty ...bool) func(ctx *APIContext) (cancel context
}
}
return
return cancel
}
}

View file

@ -82,5 +82,5 @@ func PrivateContexter() func(http.Handler) http.Handler {
func OverrideContext(ctx *PrivateContext) (cancel context.CancelFunc) {
// We now need to override the request context as the base for our work because even if the request is cancelled we have to continue this work
ctx.Override, _, cancel = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PrivateContext: %s", ctx.Req.RequestURI), process.RequestProcessType, true)
return
return cancel
}

View file

@ -734,7 +734,7 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) {
ctx.Data["GoDocDirectory"] = prefix + "{/dir}"
ctx.Data["GoDocFile"] = prefix + "{/dir}/{file}#L{line}"
}
return
return cancel
}
// RepoRefType type of repo reference
@ -1001,7 +1001,7 @@ func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context
return
}
ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
return
return cancel
}
}

View file

@ -52,5 +52,5 @@ func parseTime(value string) (int64, error) {
func prepareQueryArg(ctx *Context, name string) (value string, err error) {
value, err = url.PathUnescape(ctx.FormString(name))
value = strings.TrimSpace(value)
return
return value, err
}

View file

@ -257,7 +257,7 @@ func ToHook(repoLink string, w *webhook.Webhook) *api.Hook {
return &api.Hook{
ID: w.ID,
Type: string(w.Type),
Type: w.Type,
URL: fmt.Sprintf("%s/settings/hooks/%d", repoLink, w.ID),
Active: w.IsActive,
Config: config,

View file

@ -123,7 +123,7 @@ func ToTrackedTime(t *issues_model.TrackedTime) (apiT *api.TrackedTime) {
if t.User != nil {
apiT.UserName = t.User.Name
}
return
return apiT
}
// ToStopWatches convert Stopwatch list to api.StopWatches

View file

@ -216,7 +216,7 @@ func fixBrokenRepoUnit16961(repoUnit *repo_model.RepoUnit, bs []byte) (fixed boo
return false, nil
}
switch unit.Type(repoUnit.Type) {
switch repoUnit.Type {
case unit.TypeCode, unit.TypeReleases, unit.TypeWiki, unit.TypeProjects:
cfg := &repo_model.UnitConfig{}
repoUnit.Config = cfg

View file

@ -18,7 +18,7 @@ func wrapNewlines(w io.Writer, prefix, value []byte) (sum int64, err error) {
if len(value) == 0 {
return
}
n := 0
var n int
last := 0
for j := bytes.IndexByte(value, '\n'); j > -1; j = bytes.IndexByte(value[last:], '\n') {
n, err = w.Write(prefix)
@ -45,7 +45,7 @@ func wrapNewlines(w io.Writer, prefix, value []byte) (sum int64, err error) {
}
n, err = w.Write([]byte("\n"))
sum += int64(n)
return
return sum, err
}
// Event is an eventsource event, not all fields need to be set
@ -64,7 +64,7 @@ type Event struct {
// The return value n is the number of bytes written. Any error encountered during the write is also returned.
func (e *Event) WriteTo(w io.Writer) (int64, error) {
sum := int64(0)
nint := 0
var nint int
n, err := wrapNewlines(w, []byte("event: "), []byte(e.Name))
sum += n
if err != nil {

View file

@ -176,12 +176,12 @@ func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err er
typ = typ[:idx]
size, err = strconv.ParseInt(sizeStr, 10, 64)
return
return sha, typ, size, err
}
// ReadTagObjectID reads a tag object ID hash from a cat-file --batch stream, throwing away the rest of the stream.
func ReadTagObjectID(rd *bufio.Reader, size int64) (string, error) {
id := ""
var id string
var n int64
headerLoop:
for {
@ -216,7 +216,7 @@ headerLoop:
// ReadTreeID reads a tree ID from a cat-file --batch stream, throwing away the rest of the stream.
func ReadTreeID(rd *bufio.Reader, size int64) (string, error) {
id := ""
var id string
var n int64
headerLoop:
for {
@ -328,7 +328,7 @@ func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fn
// Deal with the 20-byte SHA
idx = 0
for idx < 20 {
read := 0
var read int
read, err = rd.Read(shaBuf[idx:20])
n += read
if err != nil {
@ -337,7 +337,7 @@ func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fn
idx += read
}
sha = shaBuf
return
return mode, fname, sha, n, err
}
var callerPrefix string

View file

@ -99,7 +99,7 @@ func (b *blobReader) Read(p []byte) (n int, err error) {
}
n, err = b.rd.Read(p)
b.n -= int64(n)
return
return n, err
}
// Close implements io.Closer

View file

@ -418,7 +418,7 @@ func (c *Commit) LoadBranchName() (err error) {
}
c.Branch, err = c.GetBranchName()
return
return err
}
// GetTagName gets the current tag name for given commit

View file

@ -157,7 +157,7 @@ func GetLastCommitForPaths(ctx context.Context, cache *LastCommitCache, commit *
if typ != "commit" {
return nil, fmt.Errorf("unexpected type: %s for commit id: %s", typ, commitID)
}
c, err = CommitFromReader(commit.repo, MustIDFromString(string(commitID)), io.LimitReader(batchReader, int64(size)))
c, err = CommitFromReader(commit.repo, MustIDFromString(commitID), io.LimitReader(batchReader, size))
if err != nil {
return nil, err
}

View file

@ -115,7 +115,7 @@ func ParseDiffHunkString(diffhunk string) (leftLine, leftHunk, rightLine, righHu
rightLine = leftLine
righHunk = leftHunk
}
return
return leftLine, leftHunk, rightLine, righHunk
}
// Example: @@ -1,8 +1,9 @@ => [..., 1, 8, 1, 9]

View file

@ -116,7 +116,7 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) {
continue
case "commit":
// Read in the commit to get its tree and in case this is one of the last used commits
curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(string(commitID)), io.LimitReader(batchReader, int64(size)))
curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(string(commitID)), io.LimitReader(batchReader, size))
if err != nil {
return nil, err
}

View file

@ -334,7 +334,7 @@ func (wr *lineSeparatedAttributeWriter) Write(p []byte) (n int, err error) {
wr.tmp = []byte(remaining[3:])
break
}
return l, fmt.Errorf("unexpected tail %s", string(remaining))
return l, fmt.Errorf("unexpected tail %s", remaining)
}
_, _ = sb.WriteRune(rn)
remaining = tail

View file

@ -101,5 +101,5 @@ func (repo *Repository) Close() (err error) {
repo.checkReader = nil
repo.checkWriter = nil
}
return
return err
}

View file

@ -95,7 +95,7 @@ func callShowRef(ctx context.Context, repoPath, prefix, arg string, skip, limit
return nil
})
return
return branchNames, countAll, err
}
func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, walkfn func(sha1, refname string) error) (countAll int, err error) {

View file

@ -132,7 +132,7 @@ type lineCountWriter struct {
func (l *lineCountWriter) Write(p []byte) (n int, err error) {
n = len(p)
l.numLines += bytes.Count(p, []byte{'\000'})
return
return n, err
}
// GetDiffNumChangedFiles counts the number of changed files
@ -177,7 +177,7 @@ func (repo *Repository) GetDiffShortStat(base, head string) (numFiles, totalAddi
if err != nil && strings.Contains(err.Error(), "no merge base") {
return GetDiffShortStat(repo.Ctx, repo.Path, base, head)
}
return
return numFiles, totalAdditions, totalDeletions, err
}
// GetDiffShortStat counts number of changed files, number of additions and deletions
@ -231,7 +231,7 @@ func parseDiffStat(stdout string) (numFiles, totalAdditions, totalDeletions int,
return 0, 0, 0, fmt.Errorf("unable to parse shortstat: %s. Error parsing NumDeletions %v", stdout, err)
}
}
return
return numFiles, totalAdditions, totalDeletions, err
}
// GetDiffOrPatch generates either diff or formatted patch data between given revisions

View file

@ -117,8 +117,8 @@ func TestReadWritePullHead(t *testing.T) {
return
}
assert.Len(t, string(headContents), 40)
assert.True(t, string(headContents) == newCommit)
assert.Len(t, headContents, 40)
assert.True(t, headContents == newCommit)
// Remove file after the test
err = repo.RemoveReference(PullPrefix + "1/head")

View file

@ -64,7 +64,7 @@ func (repo *Repository) ReadTreeToTemporaryIndex(treeish string) (filename, tmpD
defer cancel()
return "", "", func() {}, err
}
return
return filename, tmpDir, cancel, err
}
// EmptyIndex empties the index

View file

@ -27,7 +27,7 @@ func (repo *Repository) IsTagExist(name string) bool {
// returning at most limit tags, or all if limit is 0.
func (repo *Repository) GetTags(skip, limit int) (tags []string, err error) {
tags, _, err = callShowRef(repo.Ctx, repo.Path, TagPrefix, "--tags", skip, limit)
return
return tags, err
}
// GetTagType gets the type of the tag, either commit (simple) or tag (annotated)

View file

@ -58,5 +58,5 @@ func NewHasher(t ObjectType, size int64) Hasher {
// Sum generates a SHA1 for the provided hash
func (h Hasher) Sum() (sha1 SHA1) {
copy(sha1[:], h.Hash.Sum(nil))
return
return sha1
}

View file

@ -91,5 +91,5 @@ func newSignatureFromCommitline(line []byte) (sig *Signature, err error) {
return
}
}
return
return sig, err
}

View file

@ -163,7 +163,7 @@ func (l *LimitedReaderCloser) Read(p []byte) (n int, err error) {
}
n, err = l.R.Read(p)
l.N -= int64(n)
return
return n, err
}
// Close implements io.Closer

View file

@ -53,7 +53,7 @@ func BenchmarkParseGlyphs(b *testing.B) {
parser := &Parser{}
parser.Reset()
tgBytes := []byte(testglyphs)
tg := tgBytes
var tg []byte
for i := 0; i < b.N; i++ {
parser.Reset()
tg = tgBytes

View file

@ -26,7 +26,7 @@ func NewChannelContext(done <-chan struct{}, err error) *ChannelContext {
// Deadline returns the time when work done on behalf of this context
// should be canceled. There is no Deadline for a ChannelContext
func (ctx *ChannelContext) Deadline() (deadline time.Time, ok bool) {
return
return deadline, ok
}
// Done returns the channel provided at the creation of this context.

View file

@ -114,7 +114,7 @@ func CodeFromLexer(lexer chroma.Lexer, code string) string {
htmlbuf := bytes.Buffer{}
htmlw := bufio.NewWriter(&htmlbuf)
iterator, err := lexer.Tokenise(nil, string(code))
iterator, err := lexer.Tokenise(nil, code)
if err != nil {
log.Error("Can't tokenize code: %v", err)
return code
@ -197,7 +197,7 @@ func File(numLines int, fileName, language string, code []byte) []string {
m := make([]string, 0, numLines)
for _, v := range strings.SplitN(htmlbuf.String(), "\n", numLines) {
content := string(v)
content := v
// need to keep lines that are only \n so copy/paste works properly in browser
if content == "" {
content = "\n"
@ -220,8 +220,8 @@ func File(numLines int, fileName, language string, code []byte) []string {
// return unhiglighted map
func plainText(code string, numLines int) []string {
m := make([]string, 0, numLines)
for _, v := range strings.SplitN(string(code), "\n", numLines) {
content := string(v)
for _, v := range strings.SplitN(code, "\n", numLines) {
content := v
// need to keep lines that are only \n so copy/paste works properly in browser
if content == "" {
content = "\n"

View file

@ -392,7 +392,7 @@ func (b *BleveIndexer) Search(ctx context.Context, repoIDs []int64, language, ke
searchResults := make([]*SearchResult, len(result.Hits))
for i, hit := range result.Hits {
var startIndex, endIndex int = -1, -1
startIndex, endIndex := -1, -1
for _, locations := range hit.Locations["Content"] {
location := locations[0]
locationStart := int(location.Start)

View file

@ -348,7 +348,7 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
// FIXME: There is no way to get the position the keyword on the content currently on the same request.
// So we get it from content, this may made the query slower. See
// https://discuss.elastic.co/t/fetching-position-of-keyword-in-matched-document/94291
var startIndex, endIndex int = -1, -1
var startIndex, endIndex int
c, ok := hit.Highlight["content"]
if ok && len(c) > 0 {
// FIXME: Since the highlighting content will include <em> and </em> for the keywords,

View file

@ -203,9 +203,8 @@ func (b *footnoteBlockParser) Open(parent ast.Node, reader text.Reader, pc parse
return nil, parser.NoChildren
}
open := pos + 1
closes := 0
closure := util.FindClosure(line[pos+1:], '[', ']', false, false) //nolint
closes = pos + 1 + closure
closes := pos + 1 + closure
next := closes + 1
if closure > -1 {
if next >= len(line) || line[next] != ':' {

View file

@ -156,7 +156,7 @@ func actualRender(ctx *markup.RenderContext, input io.Reader, output io.Writer)
log.Warn("Unable to render markdown due to panic in goldmark: %v", err)
if log.IsDebug() {
log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2)))
log.Debug("Panic in markdown: %v\n%s", err, log.Stack(2))
}
}()
@ -185,7 +185,7 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
log.Warn("Unable to render markdown due to panic in goldmark - will return raw bytes")
if log.IsDebug() {
log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2)))
log.Debug("Panic in markdown: %v\n%s", err, log.Stack(2))
}
_, err = io.Copy(output, input)
if err != nil {

View file

@ -75,7 +75,7 @@ func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
if lexer == nil {
// include language-x class as part of commonmark spec
if _, err := w.WriteString(`<code class="chroma language-` + string(lang) + `">`); err != nil {
if _, err := w.WriteString(`<code class="chroma language-` + lang + `">`); err != nil {
return ""
}
if _, err := w.WriteString(html.EscapeString(source)); err != nil {
@ -83,7 +83,7 @@ func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
}
} else {
// include language-x class as part of commonmark spec
if _, err := w.WriteString(`<code class="chroma language-` + string(lang) + `">`); err != nil {
if _, err := w.WriteString(`<code class="chroma language-` + lang + `">`); err != nil {
return ""
}
lexer = chroma.Coalesce(lexer)

View file

@ -55,7 +55,7 @@ func Test_Sanitizer(t *testing.T) {
func TestSanitizeNonEscape(t *testing.T) {
descStr := "<scrİpt>&lt;script&gt;alert(document.domain)&lt;/script&gt;</scrİpt>"
output := template.HTML(Sanitize(string(descStr)))
output := template.HTML(Sanitize(descStr))
if strings.Contains(string(output), "<script>") {
t.Errorf("un-escaped <script> in output: %q", output)
}

View file

@ -30,11 +30,11 @@ type Issue struct {
}
// GetExternalName ExternalUserMigrated interface
func (i *Issue) GetExternalName() string { return i.PosterName }
func (issue *Issue) GetExternalName() string { return issue.PosterName }
// GetExternalID ExternalUserMigrated interface
func (i *Issue) GetExternalID() int64 { return i.PosterID }
func (issue *Issue) GetExternalID() int64 { return issue.PosterID }
func (i *Issue) GetLocalIndex() int64 { return i.Number }
func (i *Issue) GetForeignIndex() int64 { return i.ForeignIndex }
func (i *Issue) GetContext() DownloaderContext { return i.Context }
func (issue *Issue) GetLocalIndex() int64 { return issue.Number }
func (issue *Issue) GetForeignIndex() int64 { return issue.ForeignIndex }
func (issue *Issue) GetContext() DownloaderContext { return issue.Context }

View file

@ -75,5 +75,5 @@ func valToTimeDuration(vs []string) (result time.Duration) {
return
}
}
return
return result
}

View file

@ -72,7 +72,7 @@ func (m *Manager) GetLevelDB(connection string) (db *leveldb.DB, err error) {
if recovered != nil {
panic(recovered)
}
return
return db, err
}
func (m *Manager) getLevelDB(connection string) (*leveldb.DB, error) {

View file

@ -65,7 +65,7 @@ func (m *Manager) GetRedisClient(connection string) (client redis.UniversalClien
if recovered != nil {
panic(recovered)
}
return
return client
}
func (m *Manager) getRedisClient(connection string) redis.UniversalClient {

View file

@ -119,5 +119,5 @@ func (h *MultiHasher) Sums() (hashMD5, hashSHA1, hashSHA256, hashSHA512 []byte)
hashSHA1 = h.sha1.Sum(nil)
hashSHA256 = h.sha256.Sum(nil)
hashSHA512 = h.sha512.Sum(nil)
return
return hashMD5, hashSHA1, hashSHA256, hashSHA512
}

View file

@ -183,7 +183,7 @@ func (pm *Manager) nextPID() (start time.Time, pid IDType) {
return
}
pid = IDType(string(pid) + "-" + strconv.FormatInt(pm.next, 10))
return
return start, pid
}
// Remove a process from the ProcessManager.

View file

@ -120,7 +120,7 @@ func (pm *Manager) ProcessStacktraces(flat, noSystem bool) ([]*Process, int, int
// We cannot use the pm.ProcessMap here because we will release the mutex ...
processMap := map[IDType]*Process{}
processCount := 0
var processCount int
// Lock the manager
pm.mutex.Lock()

View file

@ -74,7 +74,7 @@ func unmarshalAs(bs []byte, exemplar interface{}) (data Data, err error) {
} else {
err = json.Unmarshal(bs, &data)
}
return
return data, err
}
// assignableTo will check if provided data is assignable to the same type as the exemplar

View file

@ -73,7 +73,7 @@ func NewByteFIFOQueue(typ Type, byteFIFO ByteFIFO, handle HandlerFunc, cfg, exem
failed = append(failed, fail)
}
}
return
return failed
}, config.WorkerPoolConfiguration)
return q, nil
@ -401,7 +401,7 @@ func NewByteFIFOUniqueQueue(typ Type, byteFIFO UniqueByteFIFO, handle HandlerFun
failed = append(failed, fail)
}
}
return
return failed
}, config.WorkerPoolConfiguration)
return q, nil

View file

@ -62,7 +62,7 @@ func NewPersistableChannelQueue(handle HandlerFunc, cfg, exemplar interface{}) (
failed = append(failed, fail)
}
}
return
return failed
}
channelQueue, err := NewChannelQueue(wrappedHandle, ChannelQueueConfiguration{

View file

@ -62,7 +62,7 @@ func NewPersistableChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interfac
failed = append(failed, fail)
}
}
return
return failed
}
channelUniqueQueue, err := NewChannelUniqueQueue(wrappedHandle, ChannelUniqueQueueConfiguration{

View file

@ -379,7 +379,7 @@ func FindRenderizableReferenceAlphanumeric(content string) (bool, *RenderizableR
action, location := findActionKeywords([]byte(content), match[2])
return true, &RenderizableReference{
Issue: string(content[match[2]:match[3]]),
Issue: content[match[2]:match[3]],
RefLocation: &RefSpan{Start: match[2], End: match[3]},
Action: action,
ActionLocation: location,
@ -506,7 +506,7 @@ func getCrossReference(content []byte, start, end int, fromLink, prOnly bool) *r
}
repo := string(content[start : start+sep])
issue := string(content[start+sep+1 : end])
index, err := strconv.ParseInt(string(issue), 10, 64)
index, err := strconv.ParseInt(issue, 10, 64)
if err != nil {
return nil
}

View file

@ -104,7 +104,7 @@ done
giteaHookTpls = append(giteaHookTpls, "")
}
return
return hookNames, hookTpls, giteaHookTpls
}
// CreateDelegateHooks creates all the hooks scripts for the repo

View file

@ -107,7 +107,7 @@ func InitDBConfig() {
// DBConnStr returns database connection string
func DBConnStr() (string, error) {
connStr := ""
var connStr string
Param := "?"
if strings.Contains(Database.Name, Param) {
Param = "&"
@ -168,7 +168,7 @@ func getPostgreSQLConnectionString(dbHost, dbUser, dbPasswd, dbName, dbParam, db
connStr = fmt.Sprintf("postgres://%s:%s@%s:%s/%s%ssslmode=%s",
url.PathEscape(dbUser), url.PathEscape(dbPasswd), host, port, dbName, dbParam, dbsslMode)
}
return
return connStr
}
// ParseMSSQLHostPort splits the host into host and port

View file

@ -31,7 +31,7 @@ var (
}
)
// Constant slice of httpsig algorithm objects
// HttpsigAlgs is a constant slice of httpsig algorithm objects
var HttpsigAlgs []httpsig.Algorithm
func newFederationService() {

View file

@ -40,12 +40,12 @@ func defaultI18nLangs() (res []string) {
for i := 0; i < len(defaultI18nLangNames); i += 2 {
res = append(res, defaultI18nLangNames[i])
}
return
return res
}
func defaultI18nNames() (res []string) {
for i := 0; i < len(defaultI18nLangNames); i += 2 {
res = append(res, defaultI18nLangNames[i+1])
}
return
return res
}

View file

@ -211,7 +211,7 @@ func generateLogConfig(sec *ini.Section, name string, defaults defaultLogOptions
return
}
jsonConfig = string(byteConfig)
return
return mode, jsonConfig, levelName
}
func generateNamedLogger(key string, options defaultLogOptions) *LogDescription {

View file

@ -96,7 +96,7 @@ func (a AllowedVisibility) ToVisibleTypeSlice() (result []structs.VisibleType) {
result = append(result, structs.VisibleType(i))
}
}
return
return result
}
func newService() {

View file

@ -169,35 +169,35 @@ func NewStorage(typStr string, cfg interface{}) (ObjectStorage, error) {
func initAvatars() (err error) {
log.Info("Initialising Avatar storage with type: %s", setting.Avatar.Storage.Type)
Avatars, err = NewStorage(setting.Avatar.Storage.Type, &setting.Avatar.Storage)
return
return err
}
func initAttachments() (err error) {
log.Info("Initialising Attachment storage with type: %s", setting.Attachment.Storage.Type)
Attachments, err = NewStorage(setting.Attachment.Storage.Type, &setting.Attachment.Storage)
return
return err
}
func initLFS() (err error) {
log.Info("Initialising LFS storage with type: %s", setting.LFS.Storage.Type)
LFS, err = NewStorage(setting.LFS.Storage.Type, &setting.LFS.Storage)
return
return err
}
func initRepoAvatars() (err error) {
log.Info("Initialising Repository Avatar storage with type: %s", setting.RepoAvatar.Storage.Type)
RepoAvatars, err = NewStorage(setting.RepoAvatar.Storage.Type, &setting.RepoAvatar.Storage)
return
return err
}
func initRepoArchives() (err error) {
log.Info("Initialising Repository Archive storage with type: %s", setting.RepoArchive.Storage.Type)
RepoArchives, err = NewStorage(setting.RepoArchive.Storage.Type, &setting.RepoArchive.Storage)
return
return err
}
func initPackages() (err error) {
log.Info("Initialising Packages storage with type: %s", setting.Packages.Storage.Type)
Packages, err = NewStorage(setting.Packages.Storage.Type, &setting.Packages.Storage)
return
return err
}

View file

@ -55,5 +55,5 @@ func ExtractKeysFromMapString(in map[string]VisibleType) (keys []string) {
for k := range in {
keys = append(keys, k)
}
return
return keys
}

View file

@ -733,7 +733,7 @@ func RenderCommitMessageLink(ctx context.Context, msg, urlPrefix, urlDefault str
log.Error("RenderCommitMessage: %v", err)
return ""
}
msgLines := strings.Split(strings.TrimSpace(string(fullMessage)), "\n")
msgLines := strings.Split(strings.TrimSpace(fullMessage), "\n")
if len(msgLines) == 0 {
return template.HTML("")
}
@ -843,7 +843,7 @@ func RenderNote(ctx context.Context, msg, urlPrefix string, metas map[string]str
log.Error("RenderNote: %v", err)
return ""
}
return template.HTML(string(fullMessage))
return template.HTML(fullMessage)
}
// IsMultilineCommitMessage checks to see if a commit message contains multiple lines.

View file

@ -17,8 +17,8 @@ func TestSubjectBodySeparator(t *testing.T) {
assert.Empty(t, subject, "no subject found, but one expected")
assert.Equal(t, body, input)
} else {
assert.Equal(t, subject, string(input[0:loc[0]]))
assert.Equal(t, body, string(input[loc[1]:]))
assert.Equal(t, subject, input[0:loc[0]])
assert.Equal(t, body, input[loc[1]:])
}
}

View file

@ -30,7 +30,7 @@ func round(s float64) int64 {
}
func computeTimeDiffFloor(diff int64, lang string) (int64, string) {
diffStr := ""
var diffStr string
switch {
case diff <= 0:
diff = 0
@ -88,7 +88,7 @@ func computeTimeDiffFloor(diff int64, lang string) (int64, string) {
}
func computeTimeDiff(diff int64, lang string) (int64, string) {
diffStr := ""
var diffStr string
switch {
case diff <= 0:
diff = 0

View file

@ -57,7 +57,7 @@ func (ts TimeStamp) AsTime() (tm time.Time) {
// AsTimeInLocation convert timestamp as time.Time in Local locale
func (ts TimeStamp) AsTimeInLocation(loc *time.Location) (tm time.Time) {
tm = time.Unix(int64(ts), 0).In(loc)
return
return tm
}
// AsTimePtr convert timestamp as *time.Time in Local locale

View file

@ -16,5 +16,5 @@ func ReadAtMost(r io.Reader, buf []byte) (n int, err error) {
if err == io.EOF || err == io.ErrUnexpectedEOF {
err = nil
}
return
return n, err
}

View file

@ -28,7 +28,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
if r, ok := resp.(context.ResponseWriter); ok && r.Status() > 0 {
done = true
}
return
return done, deferrable
}
case func(http.ResponseWriter, *http.Request):
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -37,7 +37,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
if r, ok := resp.(context.ResponseWriter); ok && r.Status() > 0 {
done = true
}
return
return done, deferrable
}
case func(ctx *context.Context):
@ -46,7 +46,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
ctx := context.GetContext(req)
t(ctx)
done = ctx.Written()
return
return done, deferrable
}
case func(ctx *context.Context) goctx.CancelFunc:
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -54,7 +54,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
ctx := context.GetContext(req)
deferrable = t(ctx)
done = ctx.Written()
return
return done, deferrable
}
case func(*context.APIContext):
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -62,7 +62,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
ctx := context.GetAPIContext(req)
t(ctx)
done = ctx.Written()
return
return done, deferrable
}
case func(*context.APIContext) goctx.CancelFunc:
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -70,7 +70,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
ctx := context.GetAPIContext(req)
deferrable = t(ctx)
done = ctx.Written()
return
return done, deferrable
}
case func(*context.PrivateContext):
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -78,7 +78,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
ctx := context.GetPrivateContext(req)
t(ctx)
done = ctx.Written()
return
return done, deferrable
}
case func(*context.PrivateContext) goctx.CancelFunc:
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -86,7 +86,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
ctx := context.GetPrivateContext(req)
deferrable = t(ctx)
done = ctx.Written()
return
return done, deferrable
}
case func(http.Handler) http.Handler:
return func(resp http.ResponseWriter, req *http.Request, others ...wrappedHandlerFunc) (done bool, deferrable func()) {
@ -102,7 +102,7 @@ func convertHandler(handler interface{}) wrappedHandlerFunc {
if r, ok := resp.(context.ResponseWriter); ok && r.Status() > 0 {
done = true
}
return
return done, deferrable
}
default:
panic(fmt.Sprintf("Unsupported handler type: %#v", t))