Skip to content

Commit

Permalink
move the unrelated "Joiner" out of "js_printer"
Browse files Browse the repository at this point in the history
  • Loading branch information
evanw committed Mar 18, 2021
1 parent d42bb21 commit 64c7c09
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 61 deletions.
7 changes: 4 additions & 3 deletions internal/bundler/linker.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import (
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_printer"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_lexer"
"github.com/evanw/esbuild/internal/js_printer"
Expand Down Expand Up @@ -3789,7 +3790,7 @@ func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func(gener

waitGroup.Wait()

j := js_printer.Joiner{}
j := helpers.Joiner{}
prevOffset := sourcemap.LineColumnOffset{}

// Optionally strip whitespace
Expand Down Expand Up @@ -4263,7 +4264,7 @@ func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func(gene
// Wait for cross-chunk import records before continuing
return func(continueData generateContinue) []OutputFile {
waitGroup.Wait()
j := js_printer.Joiner{}
j := helpers.Joiner{}
newlineBeforeComment := false

if len(c.options.CSSBanner) > 0 {
Expand Down Expand Up @@ -4513,7 +4514,7 @@ func (c *linkerContext) generateSourceMapForChunk(
chunkAbsDir string,
dataForSourceMaps []dataForSourceMap,
) []byte {
j := js_printer.Joiner{}
j := helpers.Joiner{}
j.AddString("{\n \"version\": 3")

// Only write out the sources for a given source index once
Expand Down
57 changes: 57 additions & 0 deletions internal/helpers/joiner.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
package helpers

// Joiner accumulates string and byte-slice fragments and concatenates them
// all at once. Rather than growing a buffer incrementally (which repeatedly
// reallocates and copies as it grows), it records each fragment together
// with its final byte offset and then performs a single exactly-sized
// allocation in Done. This is a measurable speedup.
type Joiner struct {
	lastByte byte          // final byte of the most recent non-empty fragment
	strings  []stringEntry // string fragments, each tagged with its output offset
	bytes    []bytesEntry  // byte-slice fragments, each tagged with its output offset
	length   uint32        // total size in bytes of everything added so far
}

// stringEntry is a string fragment plus its byte offset in the final output.
type stringEntry struct {
	data   string
	offset uint32
}

// bytesEntry is a byte-slice fragment plus its byte offset in the final output.
type bytesEntry struct {
	data   []byte
	offset uint32
}

// AddString appends a string fragment to the eventual output.
func (j *Joiner) AddString(data string) {
	if n := len(data); n > 0 {
		j.lastByte = data[n-1]
	}
	j.strings = append(j.strings, stringEntry{data: data, offset: j.length})
	j.length += uint32(len(data))
}

// AddBytes appends a byte-slice fragment to the eventual output. The slice
// is not copied until Done runs, so callers must not mutate it in between.
func (j *Joiner) AddBytes(data []byte) {
	if n := len(data); n > 0 {
		j.lastByte = data[n-1]
	}
	j.bytes = append(j.bytes, bytesEntry{data: data, offset: j.length})
	j.length += uint32(len(data))
}

// LastByte returns the final byte of the most recently added non-empty
// fragment, or zero if no non-empty fragment has been added yet.
func (j *Joiner) LastByte() byte {
	return j.lastByte
}

// Length returns the total number of bytes added so far.
func (j *Joiner) Length() uint32 {
	return j.length
}

// Done allocates the final buffer in one shot and copies every fragment
// into place at the offset recorded when it was added.
func (j *Joiner) Done() []byte {
	buffer := make([]byte, j.length)
	for _, s := range j.strings {
		copy(buffer[s.offset:], s.data)
	}
	for _, b := range j.bytes {
		copy(buffer[b.offset:], b.data)
	}
	return buffer
}
61 changes: 3 additions & 58 deletions internal/js_printer/js_printer.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
"github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_lexer"
"github.com/evanw/esbuild/internal/logger"
Expand Down Expand Up @@ -45,7 +46,7 @@ type SourceMapState struct {
// After all chunks are computed, they are joined together in a second pass.
// This rewrites the first mapping in each chunk to be relative to the end
// state of the previous chunk.
func AppendSourceMapChunk(j *Joiner, prevEndState SourceMapState, startState SourceMapState, sourceMap []byte) {
func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startState SourceMapState, sourceMap []byte) {
// Handle line breaks in between this mapping and the previous one
if startState.GeneratedLine != 0 {
j.AddBytes(bytes.Repeat([]byte{';'}, startState.GeneratedLine))
Expand Down Expand Up @@ -80,7 +81,7 @@ func AppendSourceMapChunk(j *Joiner, prevEndState SourceMapState, startState Sou
startState.GeneratedColumn += generatedColumn
startState.OriginalLine += originalLine
startState.OriginalColumn += originalColumn
j.AddBytes(appendMapping(nil, j.lastByte, prevEndState, startState))
j.AddBytes(appendMapping(nil, j.LastByte(), prevEndState, startState))

// Then append everything after that without modification.
j.AddBytes(sourceMap)
Expand Down Expand Up @@ -111,62 +112,6 @@ func appendMapping(buffer []byte, lastByte byte, prevState SourceMapState, curre
return buffer
}

// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	lastByte byte           // final byte of the most recent non-empty fragment
	strings  []joinerString // string fragments, each with its output offset
	bytes    []joinerBytes  // byte-slice fragments, each with its output offset
	length   uint32         // total size in bytes of everything added so far
}

// joinerString pairs a string fragment with its byte offset in the final output.
type joinerString struct {
	data   string
	offset uint32
}

// joinerBytes pairs a byte-slice fragment with its byte offset in the final output.
type joinerBytes struct {
	data   []byte
	offset uint32
}

// AddString appends a string fragment to the eventual output.
func (j *Joiner) AddString(data string) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.strings = append(j.strings, joinerString{data, j.length})
	j.length += uint32(len(data))
}

// AddBytes appends a byte-slice fragment to the eventual output. The slice
// is not copied until Done runs, so callers must not mutate it in between.
func (j *Joiner) AddBytes(data []byte) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data, j.length})
	j.length += uint32(len(data))
}

// LastByte returns the final byte of the most recently added non-empty
// fragment, or zero if no non-empty fragment has been added yet.
func (j *Joiner) LastByte() byte {
	return j.lastByte
}

// Length returns the total number of bytes added so far.
func (j *Joiner) Length() uint32 {
	return j.length
}

// Done allocates the final buffer once at exactly the right size and copies
// every recorded fragment into place at its stored offset.
func (j *Joiner) Done() []byte {
	buffer := make([]byte, j.length)
	for _, item := range j.strings {
		copy(buffer[item.offset:], item.data)
	}
	for _, item := range j.bytes {
		copy(buffer[item.offset:], item.data)
	}
	return buffer
}

const hexChars = "0123456789ABCDEF"
const firstASCII = 0x20
const lastASCII = 0x7E
Expand Down

0 comments on commit 64c7c09

Please sign in to comment.