Refactor renders #15175

Merged: 27 commits, Apr 19, 2021
Changes from all commits
2 changes: 1 addition & 1 deletion contrib/pr/checkout.go
@@ -114,7 +114,7 @@ func runPR() {

log.Printf("[PR] Setting up router\n")
//routers.GlobalInit()
external.RegisterParsers()
external.RegisterRenderers()
markup.Init()
c := routes.NormalRoutes()

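
After this refactor, standalone entry points such as this PR-checkout helper register renderers (rather than parsers) before initializing the markup package. A minimal, hypothetical bootstrap sketch; the import paths are assumed, since the diff only shows the call sites:

// Hypothetical sketch of the post-refactor bootstrap order.
package main

import (
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/markup/external"
)

func main() {
	external.RegisterRenderers() // replaces the pre-refactor external.RegisterParsers()
	markup.Init()                // initialize the markup package once renderers are registered
}
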
10 changes: 8 additions & 2 deletions models/issue_comment.go
@@ -16,6 +16,7 @@ import (

"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/markup/markdown"
"code.gitea.io/gitea/modules/references"
"code.gitea.io/gitea/modules/structs"
@@ -1178,8 +1179,13 @@ func findCodeComments(e Engine, opts FindCommentsOptions, issue *Issue, currentU
return nil, err
}

comment.RenderedContent = string(markdown.Render([]byte(comment.Content), issue.Repo.Link(),
issue.Repo.ComposeMetas()))
var err error
if comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
URLPrefix: issue.Repo.Link(),
Metas: issue.Repo.ComposeMetas(),
}, comment.Content); err != nil {
return nil, err
}
}
return comments[:n], nil
}
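
The refactor changes the markdown entry point from markdown.Render(rawBytes, urlPrefix, metas) returning []byte to markdown.RenderString(ctx, content) returning (string, error). A hedged sketch of the new call shape outside the model layer, using only the RenderContext fields exercised above (URLPrefix and Metas); the package and helper names are made up:

package rendering

import (
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/markup/markdown"
)

// renderMarkdown is a hypothetical helper mirroring the call in findCodeComments above.
func renderMarkdown(repoLink string, metas map[string]string, content string) (string, error) {
	return markdown.RenderString(&markup.RenderContext{
		URLPrefix: repoLink, // base URL used to resolve relative links
		Metas:     metas,    // repository metas used when expanding issue/commit references
	}, content)
}
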
5 changes: 4 additions & 1 deletion models/repo.go
@@ -863,7 +863,10 @@ func (repo *Repository) getUsersWithAccessMode(e Engine, mode AccessMode) (_ []*

// DescriptionHTML does special handling for the description and returns an HTML string.
func (repo *Repository) DescriptionHTML() template.HTML {
desc, err := markup.RenderDescriptionHTML([]byte(repo.Description), repo.HTMLURL(), repo.ComposeMetas())
desc, err := markup.RenderDescriptionHTML(&markup.RenderContext{
URLPrefix: repo.HTMLURL(),
Metas: repo.ComposeMetas(),
}, repo.Description)
if err != nil {
log.Error("Failed to render description for %s (ID: %d): %v", repo.Name, repo.ID, err)
return template.HTML(markup.Sanitize(repo.Description))
9 changes: 5 additions & 4 deletions models/repo_generate.go
@@ -5,13 +5,14 @@
package models

import (
"bufio"
"bytes"
"strconv"
"strings"

"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/util"

"github.com/gobwas/glob"
)
@@ -49,9 +50,9 @@ func (gt GiteaTemplate) Globs() []glob.Glob {
}

gt.globs = make([]glob.Glob, 0)
lines := strings.Split(string(util.NormalizeEOL(gt.Content)), "\n")
for _, line := range lines {
line = strings.TrimSpace(line)
scanner := bufio.NewScanner(bytes.NewReader(gt.Content))
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line == "" || strings.HasPrefix(line, "#") {
continue
}
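
The .gitea/template globs are now read line by line with a bufio.Scanner instead of normalizing EOLs and splitting the whole buffer; bufio's default ScanLines split function drops a trailing \r, so CRLF content is still handled. A small, self-contained sketch of the same pattern (glob compilation is omitted and the function name is illustrative):

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strings"
)

// patterns mirrors the scanner-based loop above, collecting non-empty, non-comment lines.
func patterns(content []byte) []string {
	var out []string
	scanner := bufio.NewScanner(bytes.NewReader(content))
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text()) // ScanLines already strips a trailing \r
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		out = append(out, line)
	}
	return out
}

func main() {
	fmt.Println(patterns([]byte("# comment\r\n*.md\n\nLICENSE\n"))) // prints [*.md LICENSE]
}
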
49 changes: 31 additions & 18 deletions modules/charset/charset.go
@@ -7,6 +7,8 @@ package charset
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"strings"
"unicode/utf8"

@@ -21,6 +23,33 @@ import (
// UTF8BOM is the utf-8 byte-order marker
var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}

// ToUTF8WithFallbackReader detects the encoding of content and converts to a UTF-8 reader if possible
func ToUTF8WithFallbackReader(rd io.Reader) io.Reader {
var buf = make([]byte, 2048)
n, err := rd.Read(buf)
if err != nil {
return rd
}

charsetLabel, err := DetectEncoding(buf[:n])
if err != nil || charsetLabel == "UTF-8" {
return io.MultiReader(bytes.NewReader(RemoveBOMIfPresent(buf[:n])), rd)
}

encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return io.MultiReader(bytes.NewReader(buf[:n]), rd)
}

return transform.NewReader(
io.MultiReader(
bytes.NewReader(RemoveBOMIfPresent(buf[:n])),
rd,
),
encoding.NewDecoder(),
)
}

// ToUTF8WithErr converts content to UTF8 encoding
func ToUTF8WithErr(content []byte) (string, error) {
charsetLabel, err := DetectEncoding(content)
@@ -49,24 +78,8 @@ func ToUTF8WithErr(content []byte) (string, error) {

// ToUTF8WithFallback detects the encoding of content and converts to UTF-8 if possible
func ToUTF8WithFallback(content []byte) []byte {
charsetLabel, err := DetectEncoding(content)
if err != nil || charsetLabel == "UTF-8" {
return RemoveBOMIfPresent(content)
}

encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return content
}

// If there is an error, we concatenate the nicely decoded part and the
// original left over. This way we won't lose data.
result, n, err := transform.Bytes(encoding.NewDecoder(), content)
if err != nil {
return append(result, content[n:]...)
}

return RemoveBOMIfPresent(result)
bs, _ := ioutil.ReadAll(ToUTF8WithFallbackReader(bytes.NewReader(content)))
return bs
}

// ToUTF8 converts content to UTF8 encoding, ignoring errors
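
ToUTF8WithFallbackReader sniffs the first 2048 bytes to detect the charset, then streams the remainder through a decoding transform, so ToUTF8WithFallback becomes a thin wrapper and callers that can work with a reader no longer need the whole content in memory. A hedged usage sketch; the helper and file name are hypothetical:

package main

import (
	"io"
	"os"

	"code.gitea.io/gitea/modules/charset"
)

// copyAsUTF8 streams a possibly non-UTF-8 file into dst as UTF-8.
func copyAsUTF8(path string, dst io.Writer) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = io.Copy(dst, charset.ToUTF8WithFallbackReader(f))
	return err
}

func main() {
	_ = copyAsUTF8("README.txt", os.Stdout) // hypothetical file name
}
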
26 changes: 21 additions & 5 deletions modules/csv/csv.go
@@ -7,7 +7,9 @@ package csv
import (
"bytes"
"encoding/csv"
stdcsv "encoding/csv"
"errors"
"io"
"regexp"
"strings"

@@ -18,17 +20,31 @@ import (
var quoteRegexp = regexp.MustCompile(`["'][\s\S]+?["']`)

// CreateReader creates a csv.Reader with the given delimiter.
func CreateReader(rawBytes []byte, delimiter rune) *csv.Reader {
rd := csv.NewReader(bytes.NewReader(rawBytes))
func CreateReader(input io.Reader, delimiter rune) *stdcsv.Reader {
rd := stdcsv.NewReader(input)
rd.Comma = delimiter
rd.TrimLeadingSpace = true
return rd
}

// CreateReaderAndGuessDelimiter tries to guess the field delimiter from the content and creates a csv.Reader.
func CreateReaderAndGuessDelimiter(rawBytes []byte) *csv.Reader {
delimiter := guessDelimiter(rawBytes)
return CreateReader(rawBytes, delimiter)
func CreateReaderAndGuessDelimiter(rd io.Reader) (*stdcsv.Reader, error) {
var data = make([]byte, 1e4)
size, err := rd.Read(data)
if err != nil {
return nil, err
}

delimiter := guessDelimiter(data[:size])

var newInput io.Reader
if size < 1e4 {
newInput = bytes.NewReader(data[:size])
} else {
newInput = io.MultiReader(bytes.NewReader(data), rd)
}

return CreateReader(newInput, delimiter), nil
}

// guessDelimiter scores the input CSV data against delimiters, and returns the best match.
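
CreateReaderAndGuessDelimiter now takes an io.Reader and returns (*stdcsv.Reader, error): it reads up to 10 KB to guess the delimiter, then prepends that buffered prefix back onto the remaining stream. A hedged caller sketch against the new signature (the helper name is made up):

package main

import (
	"fmt"
	"io"
	"strings"

	"code.gitea.io/gitea/modules/csv"
)

// readAllRows is a hypothetical helper built on the new reader-based API.
func readAllRows(input io.Reader) ([][]string, error) {
	rd, err := csv.CreateReaderAndGuessDelimiter(input)
	if err != nil {
		return nil, err
	}
	return rd.ReadAll() // rd is a standard encoding/csv Reader with Comma set to the guess
}

func main() {
	rows, err := readAllRows(strings.NewReader("a;b;c\n1;2;3"))
	fmt.Println(rows, err)
}
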
7 changes: 5 additions & 2 deletions modules/csv/csv_test.go
@@ -5,20 +5,23 @@
package csv

import (
"bytes"
"strings"
"testing"

"github.com/stretchr/testify/assert"
)

func TestCreateReader(t *testing.T) {
rd := CreateReader([]byte{}, ',')
rd := CreateReader(bytes.NewReader([]byte{}), ',')
assert.Equal(t, ',', rd.Comma)
}

func TestCreateReaderAndGuessDelimiter(t *testing.T) {
input := "a;b;c\n1;2;3\n4;5;6"

rd := CreateReaderAndGuessDelimiter([]byte(input))
rd, err := CreateReaderAndGuessDelimiter(strings.NewReader(input))
assert.NoError(t, err)
assert.Equal(t, ';', rd.Comma)
}

123 changes: 84 additions & 39 deletions modules/markup/csv/csv.go
@@ -5,9 +5,11 @@
package markup

import (
"bufio"
"bytes"
"html"
"io"
"io/ioutil"
"strconv"

"code.gitea.io/gitea/modules/csv"
@@ -16,55 +18,89 @@ import (
)

func init() {
markup.RegisterParser(Parser{})
markup.RegisterRenderer(Renderer{})
}

// Parser implements markup.Parser for csv files
type Parser struct {
// Renderer implements markup.Renderer for csv files
type Renderer struct {
}

// Name implements markup.Parser
func (Parser) Name() string {
// Name implements markup.Renderer
func (Renderer) Name() string {
return "csv"
}

// NeedPostProcess implements markup.Parser
func (Parser) NeedPostProcess() bool { return false }
// NeedPostProcess implements markup.Renderer
func (Renderer) NeedPostProcess() bool { return false }

// Extensions implements markup.Parser
func (Parser) Extensions() []string {
// Extensions implements markup.Renderer
func (Renderer) Extensions() []string {
return []string{".csv", ".tsv"}
}

// Render implements markup.Parser
func (Parser) Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
var tmpBlock bytes.Buffer

if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < int64(len(rawBytes)) {
tmpBlock.WriteString("<pre>")
tmpBlock.WriteString(html.EscapeString(string(rawBytes)))
tmpBlock.WriteString("</pre>")
return tmpBlock.Bytes()
func writeField(w io.Writer, element, class, field string) error {
if _, err := io.WriteString(w, "<"); err != nil {
return err
}
if _, err := io.WriteString(w, element); err != nil {
return err
}
if len(class) > 0 {
if _, err := io.WriteString(w, " class=\""); err != nil {
return err
}
if _, err := io.WriteString(w, class); err != nil {
return err
}
if _, err := io.WriteString(w, "\""); err != nil {
return err
}
}
if _, err := io.WriteString(w, ">"); err != nil {
return err
}
if _, err := io.WriteString(w, html.EscapeString(field)); err != nil {
return err
}
if _, err := io.WriteString(w, "</"); err != nil {
return err
}
if _, err := io.WriteString(w, element); err != nil {
return err
}
_, err := io.WriteString(w, ">")
return err
}

rd := csv.CreateReaderAndGuessDelimiter(rawBytes)
// Render implements markup.Renderer
func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
var tmpBlock = bufio.NewWriter(output)

writeField := func(element, class, field string) {
tmpBlock.WriteString("<")
tmpBlock.WriteString(element)
if len(class) > 0 {
tmpBlock.WriteString(" class=\"")
tmpBlock.WriteString(class)
tmpBlock.WriteString("\"")
// FIXME: don't read all to memory
rawBytes, err := ioutil.ReadAll(input)
if err != nil {
return err
}

if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < int64(len(rawBytes)) {
if _, err := tmpBlock.WriteString("<pre>"); err != nil {
return err
}
tmpBlock.WriteString(">")
tmpBlock.WriteString(html.EscapeString(field))
tmpBlock.WriteString("</")
tmpBlock.WriteString(element)
tmpBlock.WriteString(">")
if _, err := tmpBlock.WriteString(html.EscapeString(string(rawBytes))); err != nil {
return err
}
_, err = tmpBlock.WriteString("</pre>")
return err
}

rd, err := csv.CreateReaderAndGuessDelimiter(bytes.NewReader(rawBytes))
if err != nil {
return err
}

tmpBlock.WriteString(`<table class="data-table">`)
if _, err := tmpBlock.WriteString(`<table class="data-table">`); err != nil {
return err
}
row := 1
for {
fields, err := rd.Read()
@@ -74,20 +110,29 @@ func (Parser) Render(rawBytes []byte, urlPrefix string, metas map[string]string,
if err != nil {
continue
}
tmpBlock.WriteString("<tr>")
if _, err := tmpBlock.WriteString("<tr>"); err != nil {
return err
}
element := "td"
if row == 1 {
element = "th"
}
writeField(element, "line-num", strconv.Itoa(row))
if err := writeField(tmpBlock, element, "line-num", strconv.Itoa(row)); err != nil {
return err
}
for _, field := range fields {
writeField(element, "", field)
if err := writeField(tmpBlock, element, "", field); err != nil {
return err
}
}
if _, err := tmpBlock.WriteString("</tr>"); err != nil {
return err
}
tmpBlock.WriteString("</tr>")

row++
}
tmpBlock.WriteString("</table>")

return tmpBlock.Bytes()
if _, err = tmpBlock.WriteString("</table>"); err != nil {
return err
}
return tmpBlock.Flush()
}
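
Under the new scheme a renderer registers itself via markup.RegisterRenderer and implements Name, NeedPostProcess, Extensions, and a streaming Render(ctx, input, output) error. A hedged sketch of a minimal renderer against that interface, assuming those are the required methods; the type, name, and extension are invented for illustration:

package sample

import (
	"html"
	"io"
	"io/ioutil"

	"code.gitea.io/gitea/modules/markup"
)

// plainRenderer is a hypothetical renderer that escapes its input into a <pre> block,
// mirroring the oversized-file fallback used by the CSV renderer above.
type plainRenderer struct{}

func (plainRenderer) Name() string { return "plain" }
func (plainRenderer) NeedPostProcess() bool { return false }
func (plainRenderer) Extensions() []string { return []string{".txt"} }

func (plainRenderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
	raw, err := ioutil.ReadAll(input)
	if err != nil {
		return err
	}
	_, err = io.WriteString(output, "<pre>"+html.EscapeString(string(raw))+"</pre>")
	return err
}

func init() {
	markup.RegisterRenderer(plainRenderer{})
}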