Merge pull request '[gitea] week 2024-25 cherry pick (gitea/main -> forgejo)' (#4145) from earl-warren/wcp/2024-25 into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/4145
Reviewed-by: twenty-panda <twenty-panda@noreply.codeberg.org>
Commit: 328b5d79d3
69 changed files with 1069 additions and 282 deletions
@@ -18,7 +18,7 @@ func parseIntParam(value, param, algorithmName, config string, previousErr error
 	return parsed, previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed
 }
 
-func parseUIntParam(value, param, algorithmName, config string, previousErr error) (uint64, error) {
+func parseUIntParam(value, param, algorithmName, config string, previousErr error) (uint64, error) { //nolint:unparam
 	parsed, err := strconv.ParseUint(value, 10, 64)
 	if err != nil {
 		log.Error("invalid integer for %s representation in %s hash spec %s", param, algorithmName, config)
@@ -42,20 +42,19 @@ var (
 )
 
 // loadGitVersion returns current Git version from shell. Internal usage only.
-func loadGitVersion() (*version.Version, error) {
+func loadGitVersion() error {
 	// doesn't need RWMutex because it's executed by Init()
 	if gitVersion != nil {
-		return gitVersion, nil
+		return nil
 	}
 
 	stdout, _, runErr := NewCommand(DefaultContext, "version").RunStdString(nil)
 	if runErr != nil {
-		return nil, runErr
+		return runErr
 	}
 
 	fields := strings.Fields(stdout)
 	if len(fields) < 3 {
-		return nil, fmt.Errorf("invalid git version output: %s", stdout)
+		return fmt.Errorf("invalid git version output: %s", stdout)
 	}
 
 	var versionString string

@@ -70,7 +69,7 @@ func loadGitVersion() (*version.Version, error) {
 
 	var err error
 	gitVersion, err = version.NewVersion(versionString)
-	return gitVersion, err
+	return err
 }
 
 // SetExecutablePath changes the path of git executable and checks the file permission and version.

@@ -85,7 +84,7 @@ func SetExecutablePath(path string) error {
 	}
 	GitExecutable = absPath
 
-	_, err = loadGitVersion()
+	err = loadGitVersion()
 	if err != nil {
 		return fmt.Errorf("unable to load git version: %w", err)
 	}

@@ -312,7 +311,7 @@ func syncGitConfig() (err error) {
 
 // CheckGitVersionAtLeast check git version is at least the constraint version
 func CheckGitVersionAtLeast(atLeast string) error {
-	if _, err := loadGitVersion(); err != nil {
+	if err := loadGitVersion(); err != nil {
 		return err
 	}
 	atLeastVersion, err := version.NewVersion(atLeast)

@@ -327,7 +326,7 @@ func CheckGitVersionAtLeast(atLeast string) error {
 
 // CheckGitVersionEqual checks if the git version is equal to the constraint version.
 func CheckGitVersionEqual(equal string) error {
-	if _, err := loadGitVersion(); err != nil {
+	if err := loadGitVersion(); err != nil {
 		return err
 	}
 	atLeastVersion, err := version.NewVersion(equal)
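loadGitVersion now reports only an error; callers read the cached package-level gitVersion instead of a returned value. A rough standalone sketch of the same cache-then-check pattern, assuming the hashicorp/go-version library that the version.NewVersion calls above appear to come from (illustrative, not the Gitea module itself):

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"

	"github.com/hashicorp/go-version"
)

// gitVersion is filled once by loadGitVersion and reused afterwards.
var gitVersion *version.Version

// loadGitVersion parses the output of `git version`, e.g. "git version 2.45.1".
func loadGitVersion() error {
	if gitVersion != nil {
		return nil // already cached
	}
	stdout, err := exec.Command("git", "version").Output()
	if err != nil {
		return err
	}
	fields := strings.Fields(string(stdout))
	if len(fields) < 3 {
		return fmt.Errorf("invalid git version output: %s", stdout)
	}
	gitVersion, err = version.NewVersion(fields[2])
	return err
}

func main() {
	if err := loadGitVersion(); err != nil {
		fmt.Println("unable to load git version:", err)
		return
	}
	fmt.Println("detected git", gitVersion)
}
```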
@@ -34,13 +34,13 @@ type ObjectFormat interface {
 	ComputeHash(t ObjectType, content []byte) ObjectID
 }
 
-func computeHash(dst []byte, hasher hash.Hash, t ObjectType, content []byte) []byte {
+func computeHash(dst []byte, hasher hash.Hash, t ObjectType, content []byte) {
 	_, _ = hasher.Write(t.Bytes())
 	_, _ = hasher.Write([]byte(" "))
 	_, _ = hasher.Write([]byte(strconv.Itoa(len(content))))
 	_, _ = hasher.Write([]byte{0})
 	_, _ = hasher.Write(content)
-	return hasher.Sum(dst)
+	hasher.Sum(dst)
 }
 
 /* SHA1 Type */
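computeHash now appends into the caller-supplied buffer via hasher.Sum(dst) and returns nothing. For background, the bytes being hashed follow git's loose-object layout, "<type> <size>\0<content>"; below is a self-contained sketch of that scheme using SHA-1 (an illustration, not the repository's ObjectFormat code):

```go
package main

import (
	"crypto/sha1"
	"fmt"
	"strconv"
)

// gitObjectSHA1 hashes content the way `git hash-object` does:
// sha1("<type> <size>\x00<content>"), returned as a hex string.
func gitObjectSHA1(objectType string, content []byte) string {
	h := sha1.New()
	_, _ = h.Write([]byte(objectType))
	_, _ = h.Write([]byte(" "))
	_, _ = h.Write([]byte(strconv.Itoa(len(content))))
	_, _ = h.Write([]byte{0})
	_, _ = h.Write(content)
	return fmt.Sprintf("%x", h.Sum(nil))
}

func main() {
	// Same digest as `printf 'hello' | git hash-object --stdin`.
	fmt.Println(gitObjectSHA1("blob", []byte("hello")))
}
```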
@@ -38,6 +38,12 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
 		searchOpt.MilestoneIDs = opts.MilestoneIDs
 	}
 
+	if opts.ProjectID > 0 {
+		searchOpt.ProjectID = optional.Some(opts.ProjectID)
+	} else if opts.ProjectID == -1 { // FIXME: this is inconsistent from other places
+		searchOpt.ProjectID = optional.Some[int64](0) // Those issues with no project(projectid==0)
+	}
+
 	// See the comment of issues_model.SearchOptions for the reason why we need to convert
 	convertID := func(id int64) optional.Option[int64] {
 		if id > 0 {

@@ -49,7 +55,6 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
 		return nil
 	}
 
-	searchOpt.ProjectID = convertID(opts.ProjectID)
 	searchOpt.ProjectColumnID = convertID(opts.ProjectColumnID)
 	searchOpt.PosterID = convertID(opts.PosterID)
 	searchOpt.AssigneeID = convertID(opts.AssigneeID)
@@ -211,7 +211,7 @@ func createRequest(ctx context.Context, method, url string, headers map[string]s
 	for key, value := range headers {
 		req.Header.Set(key, value)
 	}
-	req.Header.Set("Accept", MediaType)
+	req.Header.Set("Accept", AcceptHeader)
 
 	return req, nil
 }

@@ -251,6 +251,6 @@ func handleErrorResponse(resp *http.Response) error {
 		return err
 	}
 
-	log.Trace("ErrorResponse: %v", er)
+	log.Trace("ErrorResponse(%v): %v", resp.Status, er)
 	return errors.New(er.Message)
 }
@@ -155,7 +155,7 @@ func TestHTTPClientDownload(t *testing.T) {
 	hc := &http.Client{Transport: RoundTripFunc(func(req *http.Request) *http.Response {
 		assert.Equal(t, "POST", req.Method)
 		assert.Equal(t, MediaType, req.Header.Get("Content-type"))
-		assert.Equal(t, MediaType, req.Header.Get("Accept"))
+		assert.Equal(t, AcceptHeader, req.Header.Get("Accept"))
 
 		var batchRequest BatchRequest
 		err := json.NewDecoder(req.Body).Decode(&batchRequest)

@@ -263,7 +263,7 @@ func TestHTTPClientUpload(t *testing.T) {
 	hc := &http.Client{Transport: RoundTripFunc(func(req *http.Request) *http.Response {
 		assert.Equal(t, "POST", req.Method)
 		assert.Equal(t, MediaType, req.Header.Get("Content-type"))
-		assert.Equal(t, MediaType, req.Header.Get("Accept"))
+		assert.Equal(t, AcceptHeader, req.Header.Get("Accept"))
 
 		var batchRequest BatchRequest
 		err := json.NewDecoder(req.Body).Decode(&batchRequest)
@@ -10,6 +10,8 @@ import (
 const (
 	// MediaType contains the media type for LFS server requests
 	MediaType = "application/vnd.git-lfs+json"
+	// Some LFS servers offer content with other types, so fallback to '*/*' if application/vnd.git-lfs+json cannot be served
+	AcceptHeader = "application/vnd.git-lfs+json;q=0.9, */*;q=0.8"
 )
 
 // BatchRequest contains multiple requests processed in one batch operation.
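AcceptHeader keeps application/vnd.git-lfs+json as the preferred response type while allowing any other type at a lower quality value, so LFS servers that cannot emit the vendor type can still answer. A hedged sketch of building such a request with net/http (the endpoint URL is made up for illustration):

```go
package main

import (
	"fmt"
	"net/http"
)

const (
	// MediaType is still what an LFS client declares for its own request body.
	MediaType = "application/vnd.git-lfs+json"
	// AcceptHeader prefers the LFS media type but tolerates anything else.
	AcceptHeader = "application/vnd.git-lfs+json;q=0.9, */*;q=0.8"
)

func main() {
	req, err := http.NewRequest(http.MethodPost, "https://example.com/repo.git/info/lfs/objects/batch", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", MediaType) // a real client would send an LFS JSON body
	req.Header.Set("Accept", AcceptHeader)    // the response may be served with another type
	fmt.Println(req.Header.Get("Accept"))
}
```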
@@ -37,6 +37,7 @@ func (a *BasicTransferAdapter) Download(ctx context.Context, l *Link) (io.ReadCl
 	if err != nil {
 		return nil, err
 	}
+	log.Debug("Download Request: %+v", req)
 	resp, err := performRequest(ctx, a.client, req)
 	if err != nil {
 		return nil, err
@@ -26,7 +26,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 	p := Pointer{Oid: "b5a2c96250612366ea272ffac6d9744aaf4b45aacd96aa7cfcb931ee3b558259", Size: 5}
 
 	roundTripHandler := func(req *http.Request) *http.Response {
-		assert.Equal(t, MediaType, req.Header.Get("Accept"))
+		assert.Equal(t, AcceptHeader, req.Header.Get("Accept"))
 		assert.Equal(t, "test-value", req.Header.Get("test-header"))
 
 		url := req.URL.String()
@@ -48,7 +48,7 @@ var (
 	// hashCurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
 	// Although SHA1 hashes are 40 chars long, SHA256 are 64, the regex matches the hash from 7 to 64 chars in length
 	// so that abbreviated hash links can be used as well. This matches git and GitHub usability.
-	hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,](\s|$))`)
+	hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`)
 
 	// shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
 	shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)
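The only change here is the trailing character class growing from [.,] to [.,:], so a commit SHA immediately followed by a colon is now recognized as well. A quick standalone check of the updated pattern:

```go
package main

import (
	"fmt"
	"regexp"
)

// hashCurrentPattern, copied from the change above: a trailing '.', ',' or ':'
// before whitespace or end-of-string no longer hides the hash.
var hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`)

func main() {
	for _, s := range []string{
		"see d8a994ef243349f321568f9e36d5c3f444b99cae: fixed the crash",
		"see d8a994ef243349f321568f9e36d5c3f444b99cae. fixed the crash",
		"not a hash: nothing to link",
	} {
		if m := hashCurrentPattern.FindStringSubmatch(s); m != nil {
			fmt.Printf("%q -> %s\n", s, m[1])
		} else {
			fmt.Printf("%q -> no match\n", s)
		}
	}
}
```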
@@ -381,6 +381,7 @@ func TestRegExp_sha1CurrentPattern(t *testing.T) {
 		"(abcdefabcdefabcdefabcdefabcdefabcdefabcd)",
 		"[abcdefabcdefabcdefabcdefabcdefabcdefabcd]",
 		"abcdefabcdefabcdefabcdefabcdefabcdefabcd.",
+		"abcdefabcdefabcdefabcdefabcdefabcdefabcd:",
 	}
 	falseTestCases := []string{
 		"test",
@@ -9,9 +9,9 @@ import (
 
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/markup/common"
+	"code.gitea.io/gitea/modules/util"
 
 	"github.com/yuin/goldmark/ast"
-	"github.com/yuin/goldmark/util"
 )
 
 type prefixedIDs struct {

@@ -36,7 +36,7 @@ func (p *prefixedIDs) GenerateWithDefault(value, dft []byte) []byte {
 	if !bytes.HasPrefix(result, []byte("user-content-")) {
 		result = append([]byte("user-content-"), result...)
 	}
-	if p.values.Add(util.BytesToReadOnlyString(result)) {
+	if p.values.Add(util.UnsafeBytesToString(result)) {
 		return result
 	}
 	for i := 1; ; i++ {

@@ -49,7 +49,7 @@ func (p *prefixedIDs) GenerateWithDefault(value, dft []byte) []byte {
 
 // Put puts a given element id to the used ids table.
 func (p *prefixedIDs) Put(value []byte) {
-	p.values.Add(util.BytesToReadOnlyString(value))
+	p.values.Add(util.UnsafeBytesToString(value))
 }
 
 func newPrefixedIDs() *prefixedIDs {
@@ -48,7 +48,7 @@ func (r *HTMLRenderer) renderCodeSpan(w util.BufWriter, source []byte, n ast.Nod
 	return ast.WalkContinue, nil
 }
 
-func (g *ASTTransformer) transformCodeSpan(ctx *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
+func (g *ASTTransformer) transformCodeSpan(_ *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
 	colorContent := v.Text(reader.Source())
 	if matchColor(strings.ToLower(string(colorContent))) {
 		v.AppendChild(v, NewColorPreview(colorContent))
@@ -7,10 +7,10 @@ import (
 	"fmt"
 
 	"code.gitea.io/gitea/modules/markup"
+	"code.gitea.io/gitea/modules/util"
 
 	"github.com/yuin/goldmark/ast"
 	"github.com/yuin/goldmark/text"
-	"github.com/yuin/goldmark/util"
 )
 
 func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) {

@@ -21,11 +21,11 @@ func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Headin
 	}
 	txt := v.Text(reader.Source())
 	header := markup.Header{
-		Text:  util.BytesToReadOnlyString(txt),
+		Text:  util.UnsafeBytesToString(txt),
 		Level: v.Level,
 	}
 	if id, found := v.AttributeString("id"); found {
-		header.ID = util.BytesToReadOnlyString(id.([]byte))
+		header.ID = util.UnsafeBytesToString(id.([]byte))
 	}
 	*tocList = append(*tocList, header)
 	g.applyElementDir(v)
@@ -6,6 +6,7 @@ package composer
 import (
 	"archive/zip"
 	"io"
+	"path"
 	"regexp"
 	"strings"
 

@@ -36,10 +37,14 @@ type Package struct {
 	Metadata *Metadata
 }
 
+// https://getcomposer.org/doc/04-schema.md
+
 // Metadata represents the metadata of a Composer package
 type Metadata struct {
 	Description string   `json:"description,omitempty"`
+	Readme      string   `json:"readme,omitempty"`
 	Keywords    []string `json:"keywords,omitempty"`
+	Comments    Comments `json:"_comments,omitempty"`
 	Homepage    string   `json:"homepage,omitempty"`
 	License     Licenses `json:"license,omitempty"`
 	Authors     []Author `json:"authors,omitempty"`

@@ -74,6 +79,28 @@ func (l *Licenses) UnmarshalJSON(data []byte) error {
 	return nil
 }
 
+// Comments represents the comments of a Composer package
+type Comments []string
+
+// UnmarshalJSON reads from a string or array
+func (c *Comments) UnmarshalJSON(data []byte) error {
+	switch data[0] {
+	case '"':
+		var value string
+		if err := json.Unmarshal(data, &value); err != nil {
+			return err
+		}
+		*c = Comments{value}
+	case '[':
+		values := make([]string, 0, 5)
+		if err := json.Unmarshal(data, &values); err != nil {
+			return err
+		}
+		*c = Comments(values)
+	}
+	return nil
+}
+
 // Author represents an author
 type Author struct {
 	Name string `json:"name,omitempty"`

@@ -101,14 +128,14 @@ func ParsePackage(r io.ReaderAt, size int64) (*Package, error) {
 			}
 			defer f.Close()
 
-			return ParseComposerFile(f)
+			return ParseComposerFile(archive, path.Dir(file.Name), f)
 		}
 	}
 	return nil, ErrMissingComposerFile
 }
 
 // ParseComposerFile parses a composer.json file to retrieve the metadata of a Composer package
-func ParseComposerFile(r io.Reader) (*Package, error) {
+func ParseComposerFile(archive *zip.Reader, pathPrefix string, r io.Reader) (*Package, error) {
 	var cj struct {
 		Name    string `json:"name"`
 		Version string `json:"version"`

@@ -137,6 +164,19 @@ func ParseComposerFile(r io.Reader) (*Package, error) {
 		cj.Type = "library"
 	}
 
+	if cj.Readme == "" {
+		cj.Readme = "README.md"
+	}
+	f, err := archive.Open(path.Join(pathPrefix, cj.Readme))
+	if err == nil {
+		// 10kb limit for readme content
+		buf, _ := io.ReadAll(io.LimitReader(f, 10*1024))
+		cj.Readme = string(buf)
+		_ = f.Close()
+	} else {
+		cj.Readme = ""
+	}
+
 	return &Package{
 		Name:    cj.Name,
 		Version: cj.Version,
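For context on the new Comments type: composer.json allows "_comments" to be either a single string or an array of strings, which is why UnmarshalJSON switches on the first byte of the raw value. A minimal standalone illustration of the same behaviour (a sketch, not the package itself):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Comments accepts either `"text"` or `["a", "b"]` in JSON.
type Comments []string

func (c *Comments) UnmarshalJSON(data []byte) error {
	switch data[0] {
	case '"': // single string
		var value string
		if err := json.Unmarshal(data, &value); err != nil {
			return err
		}
		*c = Comments{value}
	case '[': // array of strings
		var values []string
		if err := json.Unmarshal(data, &values); err != nil {
			return err
		}
		*c = Comments(values)
	}
	return nil
}

func main() {
	var a, b Comments
	_ = json.Unmarshal([]byte(`"single comment"`), &a)
	_ = json.Unmarshal([]byte(`["one", "two"]`), &b)
	fmt.Println(a, b) // [single comment] [one two]
}
```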
@@ -17,6 +17,8 @@ import (
 const (
 	name        = "gitea/composer-package"
 	description = "Package Description"
+	readme      = "Package Readme"
+	comments    = "Package Comment"
 	packageType = "composer-plugin"
 	author      = "Gitea Authors"
 	email       = "no.reply@gitea.io"

@@ -41,7 +43,8 @@ const composerContent = `{
 	},
 	"require": {
 		"php": ">=7.2 || ^8.0"
-	}
+	},
+	"_comments": "` + comments + `"
 }`
 
 func TestLicenseUnmarshal(t *testing.T) {

@@ -54,18 +57,30 @@ func TestLicenseUnmarshal(t *testing.T) {
 	assert.Equal(t, "MIT", l[0])
 }
 
+func TestCommentsUnmarshal(t *testing.T) {
+	var c Comments
+	assert.NoError(t, json.NewDecoder(strings.NewReader(`["comment"]`)).Decode(&c))
+	assert.Len(t, c, 1)
+	assert.Equal(t, "comment", c[0])
+	assert.NoError(t, json.NewDecoder(strings.NewReader(`"comment"`)).Decode(&c))
+	assert.Len(t, c, 1)
+	assert.Equal(t, "comment", c[0])
+}
+
 func TestParsePackage(t *testing.T) {
-	createArchive := func(name, content string) []byte {
+	createArchive := func(files map[string]string) []byte {
 		var buf bytes.Buffer
 		archive := zip.NewWriter(&buf)
-		w, _ := archive.Create(name)
-		w.Write([]byte(content))
+		for name, content := range files {
+			w, _ := archive.Create(name)
+			w.Write([]byte(content))
+		}
 		archive.Close()
 		return buf.Bytes()
 	}
 
 	t.Run("MissingComposerFile", func(t *testing.T) {
-		data := createArchive("dummy.txt", "")
+		data := createArchive(map[string]string{"dummy.txt": ""})
 
 		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
 		assert.Nil(t, cp)

@@ -73,7 +88,7 @@ func TestParsePackage(t *testing.T) {
 	})
 
 	t.Run("MissingComposerFileInRoot", func(t *testing.T) {
-		data := createArchive("sub/sub/composer.json", "")
+		data := createArchive(map[string]string{"sub/sub/composer.json": ""})
 
 		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
 		assert.Nil(t, cp)

@@ -81,43 +96,52 @@ func TestParsePackage(t *testing.T) {
 	})
 
 	t.Run("InvalidComposerFile", func(t *testing.T) {
-		data := createArchive("composer.json", "")
+		data := createArchive(map[string]string{"composer.json": ""})
 
 		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
 		assert.Nil(t, cp)
 		assert.Error(t, err)
 	})
 
-	t.Run("Valid", func(t *testing.T) {
-		data := createArchive("composer.json", composerContent)
+	t.Run("InvalidPackageName", func(t *testing.T) {
+		data := createArchive(map[string]string{"composer.json": "{}"})
 
 		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
-		assert.NoError(t, err)
-		assert.NotNil(t, cp)
-	})
-}
-
-func TestParseComposerFile(t *testing.T) {
-	t.Run("InvalidPackageName", func(t *testing.T) {
-		cp, err := ParseComposerFile(strings.NewReader(`{}`))
 		assert.Nil(t, cp)
 		assert.ErrorIs(t, err, ErrInvalidName)
 	})
 
 	t.Run("InvalidPackageVersion", func(t *testing.T) {
-		cp, err := ParseComposerFile(strings.NewReader(`{"name": "gitea/composer-package", "version": "1.a.3"}`))
+		data := createArchive(map[string]string{"composer.json": `{"name": "gitea/composer-package", "version": "1.a.3"}`})
+
+		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
 		assert.Nil(t, cp)
 		assert.ErrorIs(t, err, ErrInvalidVersion)
 	})
 
+	t.Run("InvalidReadmePath", func(t *testing.T) {
+		data := createArchive(map[string]string{"composer.json": `{"name": "gitea/composer-package", "readme": "sub/README.md"}`})
+
+		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
+		assert.NoError(t, err)
+		assert.NotNil(t, cp)
+
+		assert.Empty(t, cp.Metadata.Readme)
+	})
+
 	t.Run("Valid", func(t *testing.T) {
-		cp, err := ParseComposerFile(strings.NewReader(composerContent))
+		data := createArchive(map[string]string{"composer.json": composerContent, "README.md": readme})
+
+		cp, err := ParsePackage(bytes.NewReader(data), int64(len(data)))
 		assert.NoError(t, err)
 		assert.NotNil(t, cp)
 
 		assert.Equal(t, name, cp.Name)
 		assert.Empty(t, cp.Version)
 		assert.Equal(t, description, cp.Metadata.Description)
+		assert.Equal(t, readme, cp.Metadata.Readme)
+		assert.Len(t, cp.Metadata.Comments, 1)
+		assert.Equal(t, comments, cp.Metadata.Comments[0])
 		assert.Len(t, cp.Metadata.Authors, 1)
 		assert.Equal(t, author, cp.Metadata.Authors[0].Name)
 		assert.Equal(t, email, cp.Metadata.Authors[0].Email)
@@ -185,8 +185,6 @@ func ParseDescription(r io.Reader) (*Package, error) {
 }
 
 func setField(p *Package, data string) error {
-	const listDelimiter = ", "
-
 	if data == "" {
 		return nil
 	}

@@ -215,19 +213,19 @@ func setField(p *Package, data string) error {
 	case "Description":
 		p.Metadata.Description = value
 	case "URL":
-		p.Metadata.ProjectURL = splitAndTrim(value, listDelimiter)
+		p.Metadata.ProjectURL = splitAndTrim(value)
 	case "License":
 		p.Metadata.License = value
 	case "Author":
-		p.Metadata.Authors = splitAndTrim(authorReplacePattern.ReplaceAllString(value, ""), listDelimiter)
+		p.Metadata.Authors = splitAndTrim(authorReplacePattern.ReplaceAllString(value, ""))
 	case "Depends":
-		p.Metadata.Depends = splitAndTrim(value, listDelimiter)
+		p.Metadata.Depends = splitAndTrim(value)
 	case "Imports":
-		p.Metadata.Imports = splitAndTrim(value, listDelimiter)
+		p.Metadata.Imports = splitAndTrim(value)
 	case "Suggests":
-		p.Metadata.Suggests = splitAndTrim(value, listDelimiter)
+		p.Metadata.Suggests = splitAndTrim(value)
 	case "LinkingTo":
-		p.Metadata.LinkingTo = splitAndTrim(value, listDelimiter)
+		p.Metadata.LinkingTo = splitAndTrim(value)
 	case "NeedsCompilation":
 		p.Metadata.NeedsCompilation = value == "yes"
 	}

@@ -235,8 +233,8 @@ func setField(p *Package, data string) error {
 	return nil
 }
 
-func splitAndTrim(s, sep string) []string {
-	items := strings.Split(s, sep)
+func splitAndTrim(s string) []string {
+	items := strings.Split(s, ", ")
 	for i := range items {
 		items[i] = strings.TrimSpace(items[i])
 	}
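splitAndTrim now hard-codes the ", " delimiter that DESCRIPTION list fields use and trims each entry; a tiny illustration of the resulting behaviour (a sketch mirroring the function above):

```go
package main

import (
	"fmt"
	"strings"
)

// splitAndTrim splits a comma-separated DESCRIPTION field and trims
// the whitespace around every item, as in the change above.
func splitAndTrim(s string) []string {
	items := strings.Split(s, ", ")
	for i := range items {
		items[i] = strings.TrimSpace(items[i])
	}
	return items
}

func main() {
	fmt.Printf("%q\n", splitAndTrim("methods, stats , utils"))
	// ["methods" "stats" "utils"]
}
```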
@@ -14,8 +14,7 @@ import (
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/markup/mdstripper"
 	"code.gitea.io/gitea/modules/setting"
-
-	"github.com/yuin/goldmark/util"
+	"code.gitea.io/gitea/modules/util"
 )
 
 var (

@@ -341,7 +340,7 @@ func FindRenderizableReferenceNumeric(content string, prOnly, crossLinkOnly bool
 			return false, nil
 		}
 	}
-	r := getCrossReference(util.StringToReadOnlyBytes(content), match[2], match[3], false, prOnly)
+	r := getCrossReference(util.UnsafeStringToBytes(content), match[2], match[3], false, prOnly)
 	if r == nil {
 		return false, nil
 	}
@@ -97,7 +97,7 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
 
 // decodeEnvironmentKey decode the environment key to section and key
 // The environment key is in the form of GITEA__SECTION__KEY or GITEA__SECTION__KEY__FILE
-func decodeEnvironmentKey(prefixRegexp *regexp.Regexp, suffixFile, envKey string) (ok bool, section, key string, useFileValue bool) {
+func decodeEnvironmentKey(prefixRegexp *regexp.Regexp, suffixFile, envKey string) (ok bool, section, key string, useFileValue bool) { //nolint:unparam
 	if strings.HasSuffix(envKey, suffixFile) {
 		useFileValue = true
 		envKey = envKey[:len(envKey)-len(suffixFile)]
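For context, decodeEnvironmentKey maps variables of the form GITEA__SECTION__KEY (optionally with a __FILE suffix) onto ini sections and keys; the //nolint:unparam annotation only silences a linter warning about a parameter or return value it considers effectively constant. A rough, simplified sketch of the decoding idea (a hypothetical helper, not the real implementation, which also handles quoting and the configurable prefix):

```go
package main

import (
	"fmt"
	"strings"
)

// decodeEnvKey splits "GITEA__database__HOST" into section "database" and key "HOST".
// A trailing "__FILE" means the value should be read from the file named by the variable.
func decodeEnvKey(envKey string) (section, key string, useFile, ok bool) {
	if strings.HasSuffix(envKey, "__FILE") {
		useFile = true
		envKey = strings.TrimSuffix(envKey, "__FILE")
	}
	rest, found := strings.CutPrefix(envKey, "GITEA__")
	if !found {
		return "", "", false, false
	}
	section, key, found = strings.Cut(rest, "__")
	if !found {
		return "", "", false, false
	}
	return section, key, useFile, true
}

func main() {
	fmt.Println(decodeEnvKey("GITEA__database__HOST"))         // database HOST false true
	fmt.Println(decodeEnvKey("GITEA__database__PASSWD__FILE")) // database PASSWD true true
}
```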
@@ -122,7 +122,7 @@ const (
 	targetSecIsSec // target section is from the name seciont [name]
 )
 
-func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetSecType, error) {
+func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetSecType, error) { //nolint:unparam
 	targetSec, err := rootCfg.GetSection(storageSectionName + "." + typ)
 	if err != nil {
 		if !IsValidStorageType(StorageType(typ)) {
@@ -3,6 +3,8 @@
 
 package structs
 
+import "time"
+
 // Tag represents a repository tag
 type Tag struct {
 	Name string `json:"name"`

@@ -46,3 +48,29 @@ type TagArchiveDownloadCount struct {
 	Zip   int64 `json:"zip"`
 	TarGz int64 `json:"tar_gz"`
 }
+
+// TagProtection represents a tag protection
+type TagProtection struct {
+	ID                 int64    `json:"id"`
+	NamePattern        string   `json:"name_pattern"`
+	WhitelistUsernames []string `json:"whitelist_usernames"`
+	WhitelistTeams     []string `json:"whitelist_teams"`
+	// swagger:strfmt date-time
+	Created time.Time `json:"created_at"`
+	// swagger:strfmt date-time
+	Updated time.Time `json:"updated_at"`
+}
+
+// CreateTagProtectionOption options for creating a tag protection
+type CreateTagProtectionOption struct {
+	NamePattern        string   `json:"name_pattern"`
+	WhitelistUsernames []string `json:"whitelist_usernames"`
+	WhitelistTeams     []string `json:"whitelist_teams"`
+}
+
+// EditTagProtectionOption options for editing a tag protection
+type EditTagProtectionOption struct {
+	NamePattern        *string  `json:"name_pattern"`
+	WhitelistUsernames []string `json:"whitelist_usernames"`
+	WhitelistTeams     []string `json:"whitelist_teams"`
+}
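The new structs are plain API payload types for tag protection; as an illustration, marshalling a CreateTagProtectionOption yields the JSON body a client would send (a sketch reusing the struct definition from the diff, not an endpoint reference):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// CreateTagProtectionOption mirrors the struct added above.
type CreateTagProtectionOption struct {
	NamePattern        string   `json:"name_pattern"`
	WhitelistUsernames []string `json:"whitelist_usernames"`
	WhitelistTeams     []string `json:"whitelist_teams"`
}

func main() {
	opt := CreateTagProtectionOption{
		NamePattern:        "v*",
		WhitelistUsernames: []string{"release-bot"},
	}
	body, _ := json.MarshalIndent(opt, "", "  ")
	fmt.Println(string(body)) // JSON object with name_pattern / whitelist_usernames / whitelist_teams
}
```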
@@ -8,8 +8,7 @@ import (
 
 	"code.gitea.io/gitea/models/system"
 	"code.gitea.io/gitea/modules/json"
-
-	"github.com/yuin/goldmark/util"
+	"code.gitea.io/gitea/modules/util"
 )
 
 // DBStore can be used to store app state items in local filesystem

@@ -24,7 +23,7 @@ func (f *DBStore) Get(ctx context.Context, item StateItem) error {
 	if content == "" {
 		return nil
 	}
-	return json.Unmarshal(util.StringToReadOnlyBytes(content), item)
+	return json.Unmarshal(util.UnsafeStringToBytes(content), item)
 }
 
 // Set saves the state item

@@ -33,5 +32,5 @@ func (f *DBStore) Set(ctx context.Context, item StateItem) error {
 	if err != nil {
 		return err
 	}
-	return system.SaveAppStateContent(ctx, item.Name(), util.BytesToReadOnlyString(b))
+	return system.SaveAppStateContent(ctx, item.Name(), util.UnsafeBytesToString(b))
 }
@@ -15,10 +15,7 @@ import (
 // GenerateKeyPair generates a public and private keypair
 func GenerateKeyPair(bits int) (string, string, error) {
 	priv, _ := rsa.GenerateKey(rand.Reader, bits)
-	privPem, err := pemBlockForPriv(priv)
-	if err != nil {
-		return "", "", err
-	}
+	privPem := pemBlockForPriv(priv)
 	pubPem, err := pemBlockForPub(&priv.PublicKey)
 	if err != nil {
 		return "", "", err

@@ -26,12 +23,12 @@ func GenerateKeyPair(bits int) (string, string, error) {
 	return privPem, pubPem, nil
 }
 
-func pemBlockForPriv(priv *rsa.PrivateKey) (string, error) {
+func pemBlockForPriv(priv *rsa.PrivateKey) string {
 	privBytes := pem.EncodeToMemory(&pem.Block{
 		Type:  "RSA PRIVATE KEY",
 		Bytes: x509.MarshalPKCS1PrivateKey(priv),
 	})
-	return string(privBytes), nil
+	return string(privBytes)
 }
 
 func pemBlockForPub(pub *rsa.PublicKey) (string, error) {
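pemBlockForPriv can drop its error return because pem.EncodeToMemory cannot fail for a block without extra headers. A self-contained sketch of the same encoding (PKCS#1, "RSA PRIVATE KEY" block type, as in the diff):

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"fmt"
)

// pemBlockForPriv encodes an RSA private key as a PKCS#1 PEM block.
// With no extra headers, pem.EncodeToMemory cannot fail, hence no error return.
func pemBlockForPriv(priv *rsa.PrivateKey) string {
	return string(pem.EncodeToMemory(&pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(priv),
	}))
}

func main() {
	priv, _ := rsa.GenerateKey(rand.Reader, 2048)
	fmt.Print(pemBlockForPriv(priv)[:31], "...\n") // -----BEGIN RSA PRIVATE KEY-----...
}
```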
@@ -6,8 +6,6 @@ package util
 import (
 	"bytes"
 	"unicode"
-
-	"github.com/yuin/goldmark/util"
 )
 
 type sanitizedError struct {

@@ -33,7 +31,7 @@ var schemeSep = []byte("://")
 
 // SanitizeCredentialURLs remove all credentials in URLs (starting with "scheme://") for the input string: "https://user:pass@domain.com" => "https://sanitized-credential@domain.com"
 func SanitizeCredentialURLs(s string) string {
-	bs := util.StringToReadOnlyBytes(s)
+	bs := UnsafeStringToBytes(s)
 	schemeSepPos := bytes.Index(bs, schemeSep)
 	if schemeSepPos == -1 || bytes.IndexByte(bs[schemeSepPos:], '@') == -1 {
 		return s // fast return if there is no URL scheme or no userinfo

@@ -70,5 +68,5 @@ func SanitizeCredentialURLs(s string) string {
 		schemeSepPos = bytes.Index(bs, schemeSep)
 	}
 	out = append(out, bs...)
-	return util.BytesToReadOnlyString(out)
+	return UnsafeBytesToString(out)
 }
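Only the byte/string conversion helpers changed here; SanitizeCredentialURLs itself still behaves as its doc comment describes. For example, assuming the code.gitea.io/gitea/modules/util import path used throughout this PR:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/util"
)

func main() {
	// Credentials inside any "scheme://user:pass@host" URL embedded in the string are masked.
	fmt.Println(util.SanitizeCredentialURLs("git clone https://user:pass@example.com/repo.git"))
	// git clone https://sanitized-credential@example.com/repo.git
}
```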
@@ -87,11 +87,11 @@ func ToSnakeCase(input string) string {
 }
 
 // UnsafeBytesToString uses Go's unsafe package to convert a byte slice to a string.
-// TODO: replace all "goldmark/util.BytesToReadOnlyString" with this official approach
 func UnsafeBytesToString(b []byte) string {
 	return unsafe.String(unsafe.SliceData(b), len(b))
 }
 
 // UnsafeStringToBytes uses Go's unsafe package to convert a string to a byte slice.
 func UnsafeStringToBytes(s string) []byte {
	return unsafe.Slice(unsafe.StringData(s), len(s))
 }
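These helpers are the stdlib-sanctioned zero-copy conversions (unsafe.String / unsafe.Slice) that this PR substitutes for goldmark's BytesToReadOnlyString/StringToReadOnlyBytes throughout. A self-contained sketch, with the usual caveat that both values alias the same memory and must be treated as read-only:

```go
package main

import (
	"fmt"
	"unsafe"
)

// UnsafeBytesToString reinterprets b as a string without copying.
// The caller must not mutate b afterwards, since strings are assumed immutable.
func UnsafeBytesToString(b []byte) string {
	return unsafe.String(unsafe.SliceData(b), len(b))
}

// UnsafeStringToBytes reinterprets s as a byte slice without copying.
// The returned slice must be treated as read-only.
func UnsafeStringToBytes(s string) []byte {
	return unsafe.Slice(unsafe.StringData(s), len(s))
}

func main() {
	b := []byte("hello")
	s := UnsafeBytesToString(b) // no allocation, same backing array
	fmt.Println(s, len(UnsafeStringToBytes(s)))
}
```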
@@ -35,6 +35,10 @@ func GetSiteCookie(req *http.Request, name string) string {
 
 // SetSiteCookie returns given cookie value from request header.
 func SetSiteCookie(resp http.ResponseWriter, name, value string, maxAge int) {
+	// Previous versions would use a cookie path with a trailing /.
+	// These are more specific than cookies without a trailing /, so
+	// we need to delete these if they exist.
+	deleteLegacySiteCookie(resp, name)
 	cookie := &http.Cookie{
 		Name:     name,
 		Value:    url.QueryEscape(value),

@@ -46,10 +50,6 @@ func SetSiteCookie(resp http.ResponseWriter, name, value string, maxAge int) {
 		SameSite: setting.SessionConfig.SameSite,
 	}
 	resp.Header().Add("Set-Cookie", cookie.String())
-	// Previous versions would use a cookie path with a trailing /.
-	// These are more specific than cookies without a trailing /, so
-	// we need to delete these if they exist.
-	deleteLegacySiteCookie(resp, name)
 }
 
 // deleteLegacySiteCookie deletes the cookie with the given name at the cookie
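Calling deleteLegacySiteCookie before writing the new cookie means the expiring Set-Cookie header for the old trailing-slash path is emitted first. For background, deleting a cookie over HTTP is itself just another Set-Cookie with a negative MaxAge; a rough sketch of the idea (hypothetical helper and names, not the Gitea/Forgejo implementation):

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// deleteLegacyCookie expires a cookie that was previously set with a trailing "/"
// path, which is more specific and would otherwise shadow the new cookie.
func deleteLegacyCookie(resp http.ResponseWriter, name, legacyPath string) {
	http.SetCookie(resp, &http.Cookie{
		Name:   name,
		Path:   legacyPath, // e.g. "/subpath/" written by older versions
		MaxAge: -1,         // tells the browser to drop it immediately
	})
}

func main() {
	rec := httptest.NewRecorder()
	deleteLegacyCookie(rec, "session_cookie", "/forgejo/")
	http.SetCookie(rec, &http.Cookie{Name: "session_cookie", Value: "value", Path: "/forgejo"})
	fmt.Println(rec.Header()["Set-Cookie"]) // deletion header first, then the new cookie
}
```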