perf: introduce AST to avoid reparse
harttle committed Mar 14, 2020
1 parent 3b58f1c commit d2d6a38
Showing 96 changed files with 1,557 additions and 1,172 deletions.
43 changes: 0 additions & 43 deletions bin/char-types.js

This file was deleted.

36 changes: 36 additions & 0 deletions bin/character-gen.js
@@ -0,0 +1,36 @@
+#!/usr/bin/env node
+
+const isQuote = c => c === '"' || c === "'"
+const isOperator = c => '!=<>'.includes(c)
+const isNumber = c => c >= '0' && c <= '9'
+const isCharacter = c => (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
+const isVariable = c => '_-?'.includes(c) || isCharacter(c) || isNumber(c)
+const isBlank = c => c === '\n' || c === '\t' || c === ' ' || c === '\r'
+const isInlineBlank = c => c === '\t' || c === ' ' || c === '\r'
+const isSign = c => c === '-' || c === '+'
+
+const types = []
+for (let i = 0; i < 128; i++) {
+  const c = String.fromCharCode(i)
+  let n = 0
+  if (isVariable(c)) n |= 1
+  if (isOperator(c)) n |= 2
+  if (isBlank(c)) n |= 4
+  if (isQuote(c)) n |= 8
+  if (isInlineBlank(c)) n |= 16
+  if (isNumber(c)) n |= 32
+  if (isSign(c)) n |= 64
+  types.push(n)
+}
+console.log(`
+// bitmask character types to boost performance
+// generated by bin/character-gen.js
+export const TYPES = [${types.join(', ')}]
+export const VARIABLE = 1
+export const OPERATOR = 2
+export const BLANK = 4
+export const QUOTE = 8
+export const INLINE_BLANK = 16
+export const NUMBER = 32
+export const SIGN = 64
+`.trim())
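
The generated table turns character classification into a single array lookup plus a bitwise AND, which is what lets the new Tokenizer scan templates without regular expressions. Below is a minimal sketch of how such a table can be consumed; the helper names and the src/util/character import path are assumptions for illustration, not code from this commit:

import { TYPES, VARIABLE, BLANK } from '../src/util/character'

// true when the ASCII code is a word character: letters, digits, '_', '-' or '?'
const isWordChar = (code: number): boolean => code < 128 && (TYPES[code] & VARIABLE) !== 0

// advance past whitespace starting at `begin`, returning the first non-blank index
function skipBlank (str: string, begin: number): number {
  let i = begin
  while (i < str.length && (TYPES[str.charCodeAt(i)] & BLANK)) i++
  return i
}

console.log(isWordChar('a'.charCodeAt(0))) // true
console.log(skipBlank('  foo', 0)) // 2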
2 changes: 1 addition & 1 deletion src/builtin/filters/array.ts
@@ -31,7 +31,7 @@ function slice<T> (v: T[], begin: number, length = 1): T[] {
 
 function where<T extends object> (this: FilterImpl, arr: T[], property: string, expected?: any): T[] {
   return arr.filter(obj => {
-    const value = this.context.getFromScope(obj, property)
+    const value = this.context.getFromScope(obj, property.split('.'))
     return expected === undefined ? isTruthy(value) : value === expected
   })
 }
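
Splitting the property on '.' means where can now match nested fields, not just top-level ones. A quick sketch of the behavior this enables, using the public API (the data shape is made up for illustration):

import { Liquid } from 'liquidjs'

const engine = new Liquid()
const scope = {
  products: [
    { title: 'A', meta: { sale: true } },
    { title: 'B', meta: { sale: false } }
  ]
}
// keep only items whose nested meta.sale is truthy, then print their titles
engine
  .parseAndRender('{{ products | where: "meta.sale" | map: "title" | join: "," }}', scope)
  .then(html => console.log(html)) // "A"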
16 changes: 7 additions & 9 deletions src/builtin/tags/assign.ts
@@ -1,15 +1,13 @@
-import { assert } from '../../util/assert'
-import { identifier } from '../../parser/lexical'
-import { TagImplOptions, TagToken, Context } from '../../types'
-
-const re = new RegExp(`(${identifier.source})\\s*=([^]*)`)
+import { Tokenizer, assert, TagImplOptions, TagToken, Context } from '../../types'
 
 export default {
   parse: function (token: TagToken) {
-    const match = token.args.match(re) as RegExpMatchArray
-    assert(match, `illegal token ${token.raw}`)
-    this.key = match[1]
-    this.value = match[2]
+    const tokenizer = new Tokenizer(token.args)
+    this.key = tokenizer.readWord().content
+    tokenizer.skipBlank()
+    assert(tokenizer.peek() === '=', () => `illegal token ${token.getText()}`)
+    tokenizer.advance()
+    this.value = tokenizer.remaining()
   },
   render: function * (ctx: Context) {
     ctx.bottom()[this.key] = yield this.liquid._evalValue(this.value, ctx)
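
assign now reads its key with Tokenizer.readWord(), checks for '=', and keeps the rest of the argument string as the value expression, so template-level behavior is unchanged. A small sanity check against the public API (a sketch, not a test from this commit):

import { Liquid } from 'liquidjs'

const engine = new Liquid()
engine
  .parseAndRender('{% assign greeting = "hello" | capitalize %}{{ greeting }}')
  .then(html => console.log(html)) // "Hello"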
6 changes: 3 additions & 3 deletions src/builtin/tags/block.ts
@@ -1,16 +1,16 @@
 import BlockMode from '../../context/block-mode'
-import { ParseStream, TagToken, Token, Template, Context, TagImplOptions, Emitter } from '../../types'
+import { ParseStream, TagToken, TopLevelToken, Template, Context, TagImplOptions, Emitter } from '../../types'
 
 export default {
-  parse: function (token: TagToken, remainTokens: Token[]) {
+  parse: function (token: TagToken, remainTokens: TopLevelToken[]) {
     const match = /\w+/.exec(token.args)
     this.block = match ? match[0] : ''
     this.tpls = [] as Template[]
     const stream: ParseStream = this.liquid.parser.parseStream(remainTokens)
       .on('tag:endblock', () => stream.stop())
       .on('template', (tpl: Template) => this.tpls.push(tpl))
       .on('end', () => {
-        throw new Error(`tag ${token.raw} not closed`)
+        throw new Error(`tag ${token.getText()} not closed`)
       })
     stream.start()
   },
16 changes: 6 additions & 10 deletions src/builtin/tags/capture.ts
@@ -1,22 +1,18 @@
-import { assert } from '../../util/assert'
-import { identifier } from '../../parser/lexical'
-import { Template, Context, TagImplOptions, TagToken, Token } from '../../types'
-
-const re = new RegExp(`(${identifier.source})`)
+import { Tokenizer, assert, Template, Context, TagImplOptions, TagToken, TopLevelToken } from '../../types'
 
 export default {
-  parse: function (tagToken: TagToken, remainTokens: Token[]) {
-    const match = tagToken.args.match(re) as RegExpMatchArray
-    assert(match, `${tagToken.args} not valid identifier`)
+  parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) {
+    const tokenizer = new Tokenizer(tagToken.args)
+    this.variable = tokenizer.readWord().content
+    assert(this.variable, () => `${tagToken.args} not valid identifier`)
 
-    this.variable = match[1]
     this.templates = []
 
     const stream = this.liquid.parser.parseStream(remainTokens)
     stream.on('tag:endcapture', () => stream.stop())
       .on('template', (tpl: Template) => this.templates.push(tpl))
       .on('end', () => {
-        throw new Error(`tag ${tagToken.raw} not closed`)
+        throw new Error(`tag ${tagToken.getText()} not closed`)
       })
     stream.start()
   },
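
Same pattern in capture: the variable name now comes from Tokenizer.readWord() instead of an identifier regex, and the rendered output is unchanged. For example, via the public API (a sketch):

import { Liquid } from 'liquidjs'

const engine = new Liquid()
engine
  .parseAndRender('{% capture title %}Chapter {{ n }}{% endcapture %}{{ title }}', { n: 1 })
  .then(html => console.log(html)) // "Chapter 1"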
6 changes: 3 additions & 3 deletions src/builtin/tags/case.ts
@@ -1,7 +1,7 @@
-import { Expression, Emitter, TagToken, Token, Context, Template, TagImplOptions, ParseStream } from '../../types'
+import { Expression, Emitter, TagToken, TopLevelToken, Context, Template, TagImplOptions, ParseStream } from '../../types'
 
 export default {
-  parse: function (tagToken: TagToken, remainTokens: Token[]) {
+  parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) {
     this.cond = tagToken.args
     this.cases = []
     this.elseTemplates = []
@@ -18,7 +18,7 @@ export default {
       .on('tag:endcase', () => stream.stop())
       .on('template', (tpl: Template) => p.push(tpl))
       .on('end', () => {
-        throw new Error(`tag ${tagToken.raw} not closed`)
+        throw new Error(`tag ${tagToken.getText()} not closed`)
       })
 
     stream.start()
8 changes: 4 additions & 4 deletions src/builtin/tags/comment.ts
@@ -1,16 +1,16 @@
-import { TagToken } from '../../parser/tag-token'
-import { Token } from '../../parser/token'
+import { TagToken } from '../../tokens/tag-token'
+import { TopLevelToken } from '../../tokens/toplevel-token'
 import { TagImplOptions } from '../../template/tag/tag-impl-options'
 
 export default {
-  parse: function (tagToken: TagToken, remainTokens: Token[]) {
+  parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) {
     const stream = this.liquid.parser.parseStream(remainTokens)
     stream
       .on('token', (token: TagToken) => {
         if (token.name === 'endcomment') stream.stop()
       })
       .on('end', () => {
-        throw new Error(`tag ${tagToken.raw} not closed`)
+        throw new Error(`tag ${tagToken.getText()} not closed`)
       })
     stream.start()
   }
36 changes: 20 additions & 16 deletions src/builtin/tags/cycle.ts
@@ -1,28 +1,32 @@
 import { assert } from '../../util/assert'
-import { value as rValue } from '../../parser/lexical'
-import { Emitter, Expression, TagToken, Context, TagImplOptions } from '../../types'
-
-const groupRE = new RegExp(`^(?:(${rValue.source})\\s*:\\s*)?(.*)$`)
-const candidatesRE = new RegExp(rValue.source, 'g')
+import { evalToken, Emitter, TagToken, Context, TagImplOptions } from '../../types'
+import { Tokenizer } from '../../parser/tokenizer'
 
 export default {
   parse: function (tagToken: TagToken) {
-    let match: RegExpExecArray | null = groupRE.exec(tagToken.args) as RegExpExecArray
-    assert(match, `illegal tag: ${tagToken.raw}`)
-
-    this.group = new Expression(match[1])
-    const candidates = match[2]
+    const tokenizer = new Tokenizer(tagToken.args)
+    const group = tokenizer.readValue()
+    tokenizer.skipBlank()
 
     this.candidates = []
 
-    while ((match = candidatesRE.exec(candidates))) {
-      this.candidates.push(match[0])
+    if (group) {
+      if (tokenizer.peek() === ':') {
+        this.group = group
+        tokenizer.advance()
+      } else this.candidates.push(group)
     }
-    assert(this.candidates.length, `empty candidates: ${tagToken.raw}`)
+
+    while (!tokenizer.end()) {
+      const value = tokenizer.readValue()
+      if (value) this.candidates.push(value)
+      tokenizer.readTo(',')
+    }
+    assert(this.candidates.length, () => `empty candidates: ${tagToken.getText()}`)
   },
 
-  render: function * (ctx: Context, emitter: Emitter) {
-    const group = yield this.group.value(ctx)
+  render: function (ctx: Context, emitter: Emitter) {
+    const group = evalToken(this.group, ctx)
     const fingerprint = `cycle:${group}:` + this.candidates.join(',')
     const groups = ctx.getRegister('cycle')
     let idx = groups[fingerprint]
@@ -34,7 +38,7 @@ export default {
     const candidate = this.candidates[idx]
     idx = (idx + 1) % this.candidates.length
     groups[fingerprint] = idx
-    const html = yield new Expression(candidate).value(ctx)
+    const html = evalToken(candidate, ctx)
     emitter.write(html)
   }
 } as TagImplOptions
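
With the Tokenizer, cycle reads an optional group name (terminated by ':') and then comma-separated candidates up front; render only evaluates the already-parsed tokens with evalToken instead of building a new Expression on every call. Template-level behavior should stay the same, e.g. (a sketch using the public API):

import { Liquid } from 'liquidjs'

const engine = new Liquid()
engine
  .parseAndRender("{% for i in (1..4) %}{% cycle 'odd', 'even' %} {% endfor %}")
  .then(html => console.log(html)) // "odd even odd even "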
9 changes: 3 additions & 6 deletions src/builtin/tags/decrement.ts
@@ -1,13 +1,10 @@
-import { assert } from '../../util/assert'
-import { identifier } from '../../parser/lexical'
-import { Emitter, TagToken, Context, TagImplOptions } from '../../types'
+import { Tokenizer, Emitter, TagToken, Context, TagImplOptions } from '../../types'
 import { isNumber, stringify } from '../../util/underscore'
 
 export default {
   parse: function (token: TagToken) {
-    const match = token.args.match(identifier) as RegExpMatchArray
-    assert(match, `illegal identifier ${token.args}`)
-    this.variable = match[0]
+    const tokenizer = new Tokenizer(token.args)
+    this.variable = tokenizer.readWord().content
   },
   render: function (context: Context, emitter: Emitter) {
     const scope = context.environments
32 changes: 17 additions & 15 deletions src/builtin/tags/for.ts
@@ -1,21 +1,24 @@
-import { Emitter, TagToken, Token, Context, Template, TagImplOptions, ParseStream } from '../../types'
+import { assert, Tokenizer, evalToken, Emitter, TagToken, TopLevelToken, Context, Template, TagImplOptions, ParseStream } from '../../types'
 import { toCollection } from '../../util/collection'
-import { Expression } from '../../render/expression'
-import { assert } from '../../util/assert'
-import { identifier, value } from '../../parser/lexical'
 import { ForloopDrop } from '../../drop/forloop-drop'
 import { Hash } from '../../template/tag/hash'
 
-const re = new RegExp(`^(${identifier.source})\\s+in\\s+(${value.source})`)
-
 export default {
   type: 'block',
-  parse: function (tagToken: TagToken, remainTokens: Token[]) {
-    const match = re.exec(tagToken.args) as RegExpExecArray
-    assert(match, `illegal tag: ${tagToken.raw}`)
-    this.variable = match[1]
-    this.collection = match[2]
-    this.hash = new Hash(tagToken.args.slice(match[0].length))
+  parse: function (token: TagToken, remainTokens: TopLevelToken[]) {
+    const tokenizer = new Tokenizer(token.args)
+
+    const variable = tokenizer.readWord()
+    const inStr = tokenizer.readWord()
+    const collection = tokenizer.readValue()
+    assert(
+      variable.size() && inStr.content === 'in' && collection,
+      () => `illegal tag: ${token.getText()}`
+    )
+
+    this.variable = variable.content
+    this.collection = collection
+    this.hash = new Hash(tokenizer.remaining())
     this.templates = []
     this.elseTemplates = []
 
@@ -26,15 +29,14 @@ export default {
       .on('tag:endfor', () => stream.stop())
       .on('template', (tpl: Template) => p.push(tpl))
       .on('end', () => {
-        throw new Error(`tag ${tagToken.raw} not closed`)
+        throw new Error(`tag ${token.getText()} not closed`)
       })
 
     stream.start()
   },
   render: function * (ctx: Context, emitter: Emitter) {
     const r = this.liquid.renderer
-    let collection = yield new Expression(this.collection).value(ctx)
-    collection = toCollection(collection)
+    let collection = toCollection(evalToken(this.collection, ctx))
 
     if (!collection.length) {
       yield r.renderTemplates(this.elseTemplates, ctx, emitter)
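
The for tag now reads "variable in collection" with three Tokenizer calls and hands whatever is left (limit:, offset:, and friends) to Hash, instead of slicing off a regex match. End-to-end behavior is unchanged, e.g. (a sketch using the public API):

import { Liquid } from 'liquidjs'

const engine = new Liquid()
engine
  .parseAndRender('{% for i in (1..5) limit:3 %}{{ i }}{% endfor %}')
  .then(html => console.log(html)) // "123"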
6 changes: 3 additions & 3 deletions src/builtin/tags/if.ts
@@ -1,7 +1,7 @@
-import { Emitter, isTruthy, Expression, TagToken, Token, Context, Template, TagImplOptions, ParseStream } from '../../types'
+import { Emitter, isTruthy, Expression, TagToken, TopLevelToken, Context, Template, TagImplOptions, ParseStream } from '../../types'
 
 export default {
-  parse: function (tagToken: TagToken, remainTokens: Token[]) {
+  parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) {
     this.branches = []
     this.elseTemplates = []
 
@@ -21,7 +21,7 @@ export default {
      .on('tag:endif', () => stream.stop())
      .on('template', (tpl: Template) => p.push(tpl))
      .on('end', () => {
-        throw new Error(`tag ${tagToken.raw} not closed`)
+        throw new Error(`tag ${tagToken.getText()} not closed`)
      })
 
    stream.start()
(Diff truncated: the remaining changed files are not shown.)
