Show syntax lexer name in file view/blame (#21814)

Show which Chroma lexer is used to highlight the file in the file
header. It's useful during development to see what was detected, and I
think it's also useful info for the user:

<img width="233" alt="Screenshot 2022-11-14 at 22 31 16"
src="https://user-images.githubusercontent.com/115237/201770854-44933dfc-70a4-487c-8457-1bb3cc43ea62.png">
<img width="226" alt="Screenshot 2022-11-14 at 22 36 06"
src="https://user-images.githubusercontent.com/115237/201770856-9260ce6f-6c0f-442c-92b5-201e5b113188.png">
<img width="194" alt="Screenshot 2022-11-14 at 22 36 26"
src="https://user-images.githubusercontent.com/115237/201770857-6f56591b-80ea-42cc-8ea5-21b9156c018b.png">

Also, I improved the way this header overflows on small screens:

<img width="354" alt="Screenshot 2022-11-14 at 22 44 36"
src="https://user-images.githubusercontent.com/115237/201774828-2ddbcde1-da15-403f-bf7a-6248449fa2c5.png">

Co-authored-by: delvh <dev.lh@web.de>
Co-authored-by: Lauris BH <lauris@nix.lv>
Co-authored-by: Lunny Xiao <xiaolunwen@gmail.com>
Co-authored-by: John Olheiser <john.olheiser@gmail.com>
silverwind 2022-11-19 12:08:06 +01:00 committed by GitHub
parent 044c754ea5
commit eec1c71880
11 changed files with 132 additions and 72 deletions


@@ -18,6 +18,7 @@ import (
"code.gitea.io/gitea/modules/analyze"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"github.com/alecthomas/chroma/v2"
"github.com/alecthomas/chroma/v2/formatters/html"
@@ -56,18 +57,18 @@ func NewContext() {
})
}
// Code returns a HTML version of code string with chroma syntax highlighting classes
func Code(fileName, language, code string) string {
// Code returns a HTML version of code string with chroma syntax highlighting classes and the matched lexer name
func Code(fileName, language, code string) (string, string) {
NewContext()
// diff view newline will be passed as empty, change to literal '\n' so it can be copied
// preserve literal newline in blame view
if code == "" || code == "\n" {
return "\n"
return "\n", ""
}
if len(code) > sizeLimit {
return code
return code, ""
}
var lexer chroma.Lexer
@@ -103,7 +104,10 @@ func Code(fileName, language, code string) string {
}
cache.Add(fileName, lexer)
}
return CodeFromLexer(lexer, code)
lexerName := formatLexerName(lexer.Config().Name)
return CodeFromLexer(lexer, code), lexerName
}
// CodeFromLexer returns a HTML version of code string with chroma syntax highlighting classes
@@ -134,12 +138,12 @@ func CodeFromLexer(lexer chroma.Lexer, code string) string {
return strings.TrimSuffix(htmlbuf.String(), "\n")
}
// File returns a slice of chroma syntax highlighted HTML lines of code
func File(fileName, language string, code []byte) ([]string, error) {
// File returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name
func File(fileName, language string, code []byte) ([]string, string, error) {
NewContext()
if len(code) > sizeLimit {
return PlainText(code), nil
return PlainText(code), "", nil
}
formatter := html.New(html.WithClasses(true),
@@ -172,9 +176,11 @@ func File(fileName, language string, code []byte) ([]string, error) {
}
}
lexerName := formatLexerName(lexer.Config().Name)
iterator, err := lexer.Tokenise(nil, string(code))
if err != nil {
return nil, fmt.Errorf("can't tokenize code: %w", err)
return nil, "", fmt.Errorf("can't tokenize code: %w", err)
}
tokensLines := chroma.SplitTokensIntoLines(iterator.Tokens())
@@ -185,13 +191,13 @@ func File(fileName, language string, code []byte) ([]string, error) {
iterator = chroma.Literator(tokens...)
err = formatter.Format(htmlBuf, styles.GitHub, iterator)
if err != nil {
return nil, fmt.Errorf("can't format code: %w", err)
return nil, "", fmt.Errorf("can't format code: %w", err)
}
lines = append(lines, htmlBuf.String())
htmlBuf.Reset()
}
return lines, nil
return lines, lexerName, nil
}
// PlainText returns non-highlighted HTML for code
@@ -212,3 +218,11 @@ func PlainText(code []byte) []string {
}
return m
}
func formatLexerName(name string) string {
if name == "fallback" {
return "Plaintext"
}
return util.ToTitleCaseNoLower(name)
}


@@ -17,34 +17,52 @@ func lines(s string) []string {
func TestFile(t *testing.T) {
tests := []struct {
name string
code string
want []string
name string
code string
want []string
lexerName string
}{
{
name: "empty.py",
code: "",
want: lines(""),
name: "empty.py",
code: "",
want: lines(""),
lexerName: "Python",
},
{
name: "tags.txt",
code: "<>",
want: lines("&lt;&gt;"),
name: "empty.js",
code: "",
want: lines(""),
lexerName: "JavaScript",
},
{
name: "tags.py",
code: "<>",
want: lines(`<span class="o">&lt;</span><span class="o">&gt;</span>`),
name: "empty.yaml",
code: "",
want: lines(""),
lexerName: "YAML",
},
{
name: "eol-no.py",
code: "a=1",
want: lines(`<span class="n">a</span><span class="o">=</span><span class="mi">1</span>`),
name: "tags.txt",
code: "<>",
want: lines("&lt;&gt;"),
lexerName: "Plaintext",
},
{
name: "eol-newline1.py",
code: "a=1\n",
want: lines(`<span class="n">a</span><span class="o">=</span><span class="mi">1</span>\n`),
name: "tags.py",
code: "<>",
want: lines(`<span class="o">&lt;</span><span class="o">&gt;</span>`),
lexerName: "Python",
},
{
name: "eol-no.py",
code: "a=1",
want: lines(`<span class="n">a</span><span class="o">=</span><span class="mi">1</span>`),
lexerName: "Python",
},
{
name: "eol-newline1.py",
code: "a=1\n",
want: lines(`<span class="n">a</span><span class="o">=</span><span class="mi">1</span>\n`),
lexerName: "Python",
},
{
name: "eol-newline2.py",
@@ -54,6 +72,7 @@ func TestFile(t *testing.T) {
\n
`,
),
lexerName: "Python",
},
{
name: "empty-line-with-space.py",
@@ -73,17 +92,19 @@ c=2
\n
<span class="n">c</span><span class="o">=</span><span class="mi">2</span>`,
),
lexerName: "Python",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
out, err := File(tt.name, "", []byte(tt.code))
out, lexerName, err := File(tt.name, "", []byte(tt.code))
assert.NoError(t, err)
expected := strings.Join(tt.want, "\n")
actual := strings.Join(out, "\n")
assert.Equal(t, strings.Count(actual, "<span"), strings.Count(actual, "</span>"))
assert.EqualValues(t, expected, actual)
assert.Equal(t, tt.lexerName, lexerName)
})
}
}


@@ -94,6 +94,9 @@ func searchResult(result *SearchResult, startIndex, endIndex int) (*Result, erro
lineNumbers[i] = startLineNum + i
index += len(line)
}
highlighted, _ := highlight.Code(result.Filename, "", formattedLinesBuffer.String())
return &Result{
RepoID: result.RepoID,
Filename: result.Filename,
@@ -102,7 +105,7 @@ func searchResult(result *SearchResult, startIndex, endIndex int) (*Result, erro
Language: result.Language,
Color: result.Color,
LineNumbers: lineNumbers,
FormattedLines: highlight.Code(result.Filename, "", formattedLinesBuffer.String()),
FormattedLines: highlighted,
}, nil
}


@@ -186,13 +186,21 @@ func ToUpperASCII(s string) string {
return string(b)
}
var titleCaser = cases.Title(language.English)
var (
titleCaser = cases.Title(language.English)
titleCaserNoLower = cases.Title(language.English, cases.NoLower)
)
// ToTitleCase returns s with all english words capitalized
func ToTitleCase(s string) string {
return titleCaser.String(s)
}
// ToTitleCaseNoLower returns s with all english words capitalized without lowercasing
func ToTitleCaseNoLower(s string) string {
return titleCaserNoLower.String(s)
}
var (
whitespaceOnly = regexp.MustCompile("(?m)^[ \t]+$")
leadingWhitespace = regexp.MustCompile("(?m)(^[ \t]*)(?:[^ \t\n])")