From 72abe8bac92127693250e754bed6a3a6e9c3f16f Mon Sep 17 00:00:00 2001 From: Lexus Drumgold Date: Thu, 25 Jul 2024 21:59:24 -0400 Subject: [PATCH 1/2] feat: stream support - closes #31 - closes #35 Signed-off-by: Lexus Drumgold --- .codecov.yml | 2 +- .dictionary.txt | 2 + .dprint.jsonc | 3 +- .markdownlintignore | 1 + __fixtures__/hello.txt | 2 - __fixtures__/inline-tag.txt | 1 - __fixtures__/markdown/code-fenced.md | 31 + __fixtures__/markdown/code-indented.md | 18 + __fixtures__/markdown/code-text.md | 8 + __fixtures__/markdown/empty.md | 0 __fixtures__/markdown/html-flow.md | 16 + __fixtures__/numerics.txt | 20 - __fixtures__/strings.txt | 3 - __fixtures__/tk.ts | 21 - __fixtures__/tt.ts | 16 + __fixtures__/type-metadata.txt | 2 + __tests__/constructs/code-fenced.ts | 37 + __tests__/constructs/code-text.ts | 39 + __tests__/constructs/eof.ts | 90 +++ __tests__/constructs/html-flow.ts | 38 + __tests__/constructs/index.ts | 12 +- __tests__/constructs/inline-tag.ts | 186 ----- __tests__/constructs/line-ending.ts | 24 + __tests__/constructs/micromark.ts | 30 + __tests__/constructs/numeric.ts | 142 ---- __tests__/constructs/punctuator.ts | 69 -- __tests__/constructs/string.ts | 135 ---- __tests__/constructs/type-metadata.ts | 127 +++ __tests__/constructs/ws.ts | 93 --- __tests__/interfaces/index.ts | 6 - __tests__/interfaces/mock-instance.ts | 19 - __tests__/utils/finalize-context.ts | 35 + __tests__/utils/inspect.ts | 58 +- __tests__/utils/is-point.ts | 2 +- __tests__/utils/list.ts | 47 ++ __tests__/utils/token.ts | 31 - eslint.base.config.mjs | 3 +- eslint.config.mjs | 22 +- package.json | 23 +- src/__snapshots__/tokenize.integration.snap | 192 ++--- src/__tests__/index.e2e.spec.ts | 4 +- src/__tests__/tokenize.integration.spec.ts | 123 +-- src/constructs/eof.ts | 68 -- src/constructs/index.ts | 1 - src/constructs/initialize.ts | 77 +- src/enums/chars.ts | 155 ++++ src/enums/codes.ts | 170 ++++ src/enums/index.ts | 3 +- src/enums/tt.ts | 16 - src/index.ts | 3 +- .../__tests__/construct-record.spec-d.ts | 31 + src/interfaces/__tests__/construct.spec-d.ts | 30 +- .../__tests__/effects.spec-d.ts | 9 +- .../__tests__/options-preprocess.spec-d.ts | 15 + src/interfaces/__tests__/options.spec-d.ts | 45 +- src/interfaces/__tests__/place.spec-d.ts | 17 + src/interfaces/__tests__/position.spec-d.ts | 17 + .../__tests__/token-fields.spec-d.ts | 12 + src/interfaces/__tests__/token-info.spec-d.ts | 31 + .../__tests__/token-type-map.spec-d.ts | 4 +- src/interfaces/__tests__/token.spec-d.ts | 18 +- .../__tests__/tokenize-context.spec-d.ts | 76 +- src/interfaces/construct-record.ts | 27 + src/interfaces/construct.ts | 14 +- src/{types => interfaces}/effects.ts | 11 +- src/interfaces/index.ts | 9 +- src/interfaces/options-preprocess.ts | 18 + src/interfaces/options.ts | 54 +- src/interfaces/place.ts | 22 + src/interfaces/position.ts | 27 + src/interfaces/token-fields.ts | 20 + src/interfaces/token-info.ts | 35 + src/interfaces/token-type-map.ts | 9 +- src/interfaces/token.ts | 20 +- src/interfaces/tokenize-context.ts | 84 +- src/lexer.ts | 728 +++++++++++++----- src/preprocess.ts | 136 ++++ src/tokenize.ts | 78 +- src/types/__tests__/chunk.spec-d.ts | 17 + src/types/__tests__/code-check.spec-d.ts | 21 + src/types/__tests__/code.spec-d.ts | 16 + src/types/__tests__/construct-pack.spec-d.ts | 17 + .../__tests__/construct-record.spec-d.ts | 31 - .../__tests__/constructs-record.spec-d.ts | 21 - src/types/__tests__/constructs.spec-d.ts | 8 +- src/types/__tests__/consume.spec-d.ts | 2 +- 
src/types/__tests__/define-skip.spec-d.ts | 25 + src/types/__tests__/encoding.spec-d.ts | 32 + src/types/__tests__/enter.spec-d.ts | 7 +- src/types/__tests__/event-type.spec-d.ts | 4 - src/types/__tests__/event.spec-d.ts | 4 +- src/types/__tests__/file-like.spec-d.ts | 13 + .../__tests__/finalize-context.spec-d.ts | 4 +- src/types/__tests__/guard.spec-d.ts | 2 +- src/types/__tests__/initializer.spec-d.ts | 3 +- src/types/__tests__/now.spec-d.ts | 22 + src/types/__tests__/preprocessor.spec-d.ts | 32 + src/types/__tests__/slice-serialize.spec-d.ts | 28 + src/types/__tests__/slice-stream.spec-d.ts | 22 + src/types/__tests__/state.spec-d.ts | 2 +- src/types/__tests__/token-factory.spec-d.ts | 7 +- src/types/__tests__/token-fields.spec-d.ts | 13 - .../__tests__/tokenize-options.spec-d.ts | 17 + src/types/__tests__/tokenizer.spec-d.ts | 3 +- src/types/__tests__/value.spec-d.ts | 16 + src/types/__tests__/write.spec-d.ts | 22 + src/types/chunk.ts | 15 + src/types/code-check.ts | 18 + src/types/code.ts | 17 + src/types/construct-pack.ts | 15 + src/types/construct-record.ts | 31 - src/types/constructs-record.ts | 15 - src/types/constructs.ts | 8 +- src/types/consume.ts | 2 +- src/types/define-skip.ts | 25 + src/types/encoding.ts | 23 + src/types/enter.ts | 7 +- src/types/event-type.ts | 6 +- src/types/file-like.ts | 20 + src/types/finalize-context.ts | 10 +- src/types/guard.ts | 2 +- src/types/index.ts | 28 +- src/types/initializer.ts | 3 +- src/types/now.ts | 17 + src/types/preprocessor.ts | 31 + src/types/slice-serialize.ts | 22 + src/types/slice-stream.ts | 20 + src/types/state.ts | 2 +- src/types/token-factory.ts | 9 +- src/types/token-fields.ts | 15 - src/types/token-type.ts | 2 +- src/types/tokenize-options.ts | 16 + src/types/tokenizer.ts | 3 +- src/types/value.ts | 13 + src/types/write.ts | 22 + src/utils/__tests__/is-line-ending.spec.ts | 25 + .../__tests__/resolve-all.functional.spec.ts | 16 +- .../resolve-slice.functional.spec.ts | 19 +- src/utils/index.ts | 3 +- src/utils/is-line-ending.ts | 32 + src/utils/resolve-all.ts | 12 +- src/utils/resolve-slice.ts | 6 +- src/utils/resolve-token-list.ts | 55 ++ .../@flex-development/vfile-lexer/index.d.mts | 15 +- yarn.lock | 323 +++----- 145 files changed, 3231 insertions(+), 1926 deletions(-) delete mode 100644 __fixtures__/hello.txt delete mode 100644 __fixtures__/inline-tag.txt create mode 100644 __fixtures__/markdown/code-fenced.md create mode 100644 __fixtures__/markdown/code-indented.md create mode 100644 __fixtures__/markdown/code-text.md create mode 100644 __fixtures__/markdown/empty.md create mode 100644 __fixtures__/markdown/html-flow.md delete mode 100644 __fixtures__/numerics.txt delete mode 100644 __fixtures__/strings.txt delete mode 100644 __fixtures__/tk.ts create mode 100644 __fixtures__/tt.ts create mode 100644 __fixtures__/type-metadata.txt create mode 100644 __tests__/constructs/code-fenced.ts create mode 100644 __tests__/constructs/code-text.ts create mode 100644 __tests__/constructs/eof.ts create mode 100644 __tests__/constructs/html-flow.ts delete mode 100644 __tests__/constructs/inline-tag.ts create mode 100644 __tests__/constructs/line-ending.ts create mode 100644 __tests__/constructs/micromark.ts delete mode 100644 __tests__/constructs/numeric.ts delete mode 100644 __tests__/constructs/punctuator.ts delete mode 100644 __tests__/constructs/string.ts create mode 100644 __tests__/constructs/type-metadata.ts delete mode 100644 __tests__/constructs/ws.ts delete mode 100644 __tests__/interfaces/index.ts delete mode 100644 
__tests__/interfaces/mock-instance.ts create mode 100644 __tests__/utils/finalize-context.ts create mode 100644 __tests__/utils/list.ts delete mode 100644 __tests__/utils/token.ts delete mode 100644 src/constructs/eof.ts create mode 100644 src/enums/chars.ts create mode 100644 src/enums/codes.ts delete mode 100644 src/enums/tt.ts create mode 100644 src/interfaces/__tests__/construct-record.spec-d.ts rename src/{types => interfaces}/__tests__/effects.spec-d.ts (79%) create mode 100644 src/interfaces/__tests__/options-preprocess.spec-d.ts create mode 100644 src/interfaces/__tests__/place.spec-d.ts create mode 100644 src/interfaces/__tests__/position.spec-d.ts create mode 100644 src/interfaces/__tests__/token-fields.spec-d.ts create mode 100644 src/interfaces/__tests__/token-info.spec-d.ts create mode 100644 src/interfaces/construct-record.ts rename src/{types => interfaces}/effects.ts (77%) create mode 100644 src/interfaces/options-preprocess.ts create mode 100644 src/interfaces/place.ts create mode 100644 src/interfaces/position.ts create mode 100644 src/interfaces/token-fields.ts create mode 100644 src/interfaces/token-info.ts create mode 100644 src/preprocess.ts create mode 100644 src/types/__tests__/chunk.spec-d.ts create mode 100644 src/types/__tests__/code-check.spec-d.ts create mode 100644 src/types/__tests__/code.spec-d.ts create mode 100644 src/types/__tests__/construct-pack.spec-d.ts delete mode 100644 src/types/__tests__/construct-record.spec-d.ts delete mode 100644 src/types/__tests__/constructs-record.spec-d.ts create mode 100644 src/types/__tests__/define-skip.spec-d.ts create mode 100644 src/types/__tests__/encoding.spec-d.ts create mode 100644 src/types/__tests__/file-like.spec-d.ts create mode 100644 src/types/__tests__/now.spec-d.ts create mode 100644 src/types/__tests__/preprocessor.spec-d.ts create mode 100644 src/types/__tests__/slice-serialize.spec-d.ts create mode 100644 src/types/__tests__/slice-stream.spec-d.ts delete mode 100644 src/types/__tests__/token-fields.spec-d.ts create mode 100644 src/types/__tests__/tokenize-options.spec-d.ts create mode 100644 src/types/__tests__/value.spec-d.ts create mode 100644 src/types/__tests__/write.spec-d.ts create mode 100644 src/types/chunk.ts create mode 100644 src/types/code-check.ts create mode 100644 src/types/code.ts create mode 100644 src/types/construct-pack.ts delete mode 100644 src/types/construct-record.ts delete mode 100644 src/types/constructs-record.ts create mode 100644 src/types/define-skip.ts create mode 100644 src/types/encoding.ts create mode 100644 src/types/file-like.ts create mode 100644 src/types/now.ts create mode 100644 src/types/preprocessor.ts create mode 100644 src/types/slice-serialize.ts create mode 100644 src/types/slice-stream.ts delete mode 100644 src/types/token-fields.ts create mode 100644 src/types/tokenize-options.ts create mode 100644 src/types/value.ts create mode 100644 src/types/write.ts create mode 100644 src/utils/__tests__/is-line-ending.spec.ts create mode 100644 src/utils/is-line-ending.ts create mode 100644 src/utils/resolve-token-list.ts diff --git a/.codecov.yml b/.codecov.yml index 1d192e6..639dd60 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -90,6 +90,6 @@ ignore: profiling: critical_files_paths: - - src/constructs/eof.ts - src/constructs/initialize.ts - src/lexer.ts + - src/preprocess.ts diff --git a/.dictionary.txt b/.dictionary.txt index 21016f5..0719d72 100644 --- a/.dictionary.txt +++ b/.dictionary.txt @@ -13,6 +13,7 @@ fbca ggshield gpgsign hmarr +iife jchen kaisugi lcov 
@@ -33,3 +34,4 @@ vates vfile vitest yarnrc +zwnj diff --git a/.dprint.jsonc b/.dprint.jsonc index 585cdf6..5242e67 100644 --- a/.dprint.jsonc +++ b/.dprint.jsonc @@ -24,7 +24,8 @@ "CHANGELOG.md", "LICENSE.md", "RELEASE_NOTES.md", - "yarn.lock" + "yarn.lock", + "__fixtures__/markdown/*.md" ], "exec": { "commands": [ diff --git a/.markdownlintignore b/.markdownlintignore index 4a4c8c7..3a22d74 100644 --- a/.markdownlintignore +++ b/.markdownlintignore @@ -5,3 +5,4 @@ **/CHANGELOG.md **/LICENSE.md **/RELEASE_NOTES.md +__fixtures__/markdown/*.md diff --git a/__fixtures__/hello.txt b/__fixtures__/hello.txt deleted file mode 100644 index 33c08f5..0000000 --- a/__fixtures__/hello.txt +++ /dev/null @@ -1,2 +0,0 @@ -const 你好 = "hello 👋"; -console.log(\u4f60\u597d); // hello 👋 diff --git a/__fixtures__/inline-tag.txt b/__fixtures__/inline-tag.txt deleted file mode 100644 index e7c646a..0000000 --- a/__fixtures__/inline-tag.txt +++ /dev/null @@ -1 +0,0 @@ -{@linkcode Code} diff --git a/__fixtures__/markdown/code-fenced.md b/__fixtures__/markdown/code-fenced.md new file mode 100644 index 0000000..f844db6 --- /dev/null +++ b/__fixtures__/markdown/code-fenced.md @@ -0,0 +1,31 @@ +``` +fenced code +``` + +```js +fenced code with a language +``` + +```js line=1 +fenced code with meta +``` + +~~~ +fenced code with tildes +~~~ + +```not fenced code``` + +~~~fenced code~~~ +asd +~~~ + +``` + +asd +``` + +``` +asd + +``` diff --git a/__fixtures__/markdown/code-indented.md b/__fixtures__/markdown/code-indented.md new file mode 100644 index 0000000..8dffc03 --- /dev/null +++ b/__fixtures__/markdown/code-indented.md @@ -0,0 +1,18 @@ + indented code + + + more indented code +Not indented code + + more + indent + +Not code. + + tabs + and mixed with spaces + extra spaces + +Not code. + + a tab diff --git a/__fixtures__/markdown/code-text.md b/__fixtures__/markdown/code-text.md new file mode 100644 index 0000000..b6fd13a --- /dev/null +++ b/__fixtures__/markdown/code-text.md @@ -0,0 +1,8 @@ +A couple of code examples: `a`, ` b`, `c `, ` d `, ` e +`, ` f +`, `g +`, ` +h`. + +And: `alpha bravo charlie + delta echo`. diff --git a/__fixtures__/markdown/empty.md b/__fixtures__/markdown/empty.md new file mode 100644 index 0000000..e69de29 diff --git a/__fixtures__/markdown/html-flow.md b/__fixtures__/markdown/html-flow.md new file mode 100644 index 0000000..d33624e --- /dev/null +++ b/__fixtures__/markdown/html-flow.md @@ -0,0 +1,16 @@ + + + + + + + + + + +
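For context before the source changes below: these markdown fixtures are fed through the package's `tokenize` API in the reworked integration tests. A minimal sketch of that flow, assuming the named `tokenize` export listed in the e2e spec, a `constructs` option mirroring the pre-existing API, and the micromark-backed construct record added under `__tests__/constructs` in this patch (import paths and logged values are illustrative):

```ts
import { tokenize } from '@flex-development/vfile-lexer'
import { readSync as read } from 'to-vfile'
import micromark from './__tests__/constructs/micromark'

// tokenize accepts a file-like value and returns a flat list of
// [event, token] pairs; constructs decide which tokens are produced
// from the preprocessed character codes.
const events = tokenize(read('__fixtures__/markdown/code-fenced.md'), {
  constructs: micromark, // character code -> construct(s)
  tabSize: 2
})

for (const [event, token] of events) {
  console.log(event, token.type) // e.g. 'enter', 'codeFenced'
}
```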
diff --git a/__fixtures__/numerics.txt b/__fixtures__/numerics.txt deleted file mode 100644 index e10116b..0000000 --- a/__fixtures__/numerics.txt +++ /dev/null @@ -1,20 +0,0 @@ -0 -0n -1 -1n -2 -2n -3 -3n -4 -4n -5 -5n -6 -6n -7 -7n -8 -8n -9 -9n diff --git a/__fixtures__/strings.txt b/__fixtures__/strings.txt deleted file mode 100644 index 944a298..0000000 --- a/__fixtures__/strings.txt +++ /dev/null @@ -1,3 +0,0 @@ -'😍' -"👍" -\'🚀\' diff --git a/__fixtures__/tk.ts b/__fixtures__/tk.ts deleted file mode 100644 index 32ed53d..0000000 --- a/__fixtures__/tk.ts +++ /dev/null @@ -1,21 +0,0 @@ -/** - * @file Fixtures - tk - * @module fixtures/tk - */ - -/** - * Token types. - * - * @enum {Lowercase} - */ -enum tk { - bigint = 'bigint', - inlineTag = 'inlineTag', - number = 'number', - punctuator = 'punctuator', - string = 'string', - tag = 'tag', - whitespace = 'whitespace' -} - -export default tk diff --git a/__fixtures__/tt.ts b/__fixtures__/tt.ts new file mode 100644 index 0000000..f86ac51 --- /dev/null +++ b/__fixtures__/tt.ts @@ -0,0 +1,16 @@ +/** + * @file Fixtures - tt + * @module fixtures/tt + */ + +/** + * Token types. + * + * @enum {string} + */ +enum tt { + eof = 'eof', + typeMetadata = 'typeMetadata' +} + +export default tt diff --git a/__fixtures__/type-metadata.txt b/__fixtures__/type-metadata.txt new file mode 100644 index 0000000..33361ed --- /dev/null +++ b/__fixtures__/type-metadata.txt @@ -0,0 +1,2 @@ +{{ id: string }} +{string diff --git a/__tests__/constructs/code-fenced.ts b/__tests__/constructs/code-fenced.ts new file mode 100644 index 0000000..ae1a675 --- /dev/null +++ b/__tests__/constructs/code-fenced.ts @@ -0,0 +1,37 @@ +/** + * @file Test Constructs - codeFenced + * @module tests/constructs/codeFenced + */ + +import { codes } from '#src/enums' +import type { Construct, TokenizeContext } from '#src/interfaces' +import type { Code, Tokenizer } from '#src/types' +import * as micromark from 'micromark-core-commonmark' + +/** + * Fenced code construct. + * + * @const {Construct} codeFenced + */ +const codeFenced: Construct = { + name: micromark.codeFenced.name, + test, + tokenize: micromark.codeFenced.tokenize as unknown as Tokenizer +} + +export default codeFenced + +/** + * Check if the current character `code` can start this construct. + * + * @see {@linkcode Code} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Code} code - Current character code + * @return {boolean} `true` if `code` can start construct + */ +function test(this: TokenizeContext, code: Code): boolean { + return code === codes.graveAccent || code === codes.tilde +} diff --git a/__tests__/constructs/code-text.ts b/__tests__/constructs/code-text.ts new file mode 100644 index 0000000..edc9b28 --- /dev/null +++ b/__tests__/constructs/code-text.ts @@ -0,0 +1,39 @@ +/** + * @file Test Constructs - codeText + * @module tests/constructs/codeText + */ + +import { codes } from '#src/enums' +import type { Construct, TokenizeContext } from '#src/interfaces' +import type { Code, Guard, Resolver, Tokenizer } from '#src/types' +import * as micromark from 'micromark-core-commonmark' + +/** + * Inline code construct. 
+ * + * @const {Construct} codeText + */ +const codeText: Construct = { + name: micromark.codeText.name, + previous: micromark.codeText.previous as unknown as Guard, + resolve: micromark.codeText.resolve as unknown as Resolver, + test, + tokenize: micromark.codeText.tokenize as unknown as Tokenizer +} + +export default codeText + +/** + * Check if the current character `code` can start this construct. + * + * @see {@linkcode Code} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Code} code - Current character code + * @return {boolean} `true` if `code` can start construct + */ +function test(this: TokenizeContext, code: Code): boolean { + return code === codes.graveAccent +} diff --git a/__tests__/constructs/eof.ts b/__tests__/constructs/eof.ts new file mode 100644 index 0000000..db801a5 --- /dev/null +++ b/__tests__/constructs/eof.ts @@ -0,0 +1,90 @@ +/** + * @file Test Constructs - eof + * @module tests/constructs/eof + */ + +import tt from '#fixtures/tt' +import { codes, ev } from '#src/enums' +import type { Construct, Effects, TokenizeContext } from '#src/interfaces' +import type { Code, Event, State } from '#src/types' + +/** + * End of file construct. + * + * @const {Construct} eof + */ +const eof: Construct = { name: tt.eof, previous, resolveAll, tokenize } + +export default eof + +/** + * Check if the previous character `code` can come before this construct. + * + * @see {@linkcode Code} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Code} code - Previous character code + * @return {boolean} `true` if `code` allowed before construct + */ +function previous(this: TokenizeContext, code: Code): boolean { + return typeof code === 'number' +} + +/** + * Resolve all events. + * + * @see {@linkcode Event} + * @see {@linkcode TokenizeContext} + * + * @param {Event[]} events - List of events + * @param {TokenizeContext} context - Tokenize context + * @return {Event[]} Changed events + */ +function resolveAll(events: Event[], context: TokenizeContext): Event[] { + for (const [event, token] of events) { + if (event === ev.enter && token.type !== tt.eof) { + token.value = context.sliceSerialize(token) + } + } + + return events +} + +/** + * Set up a state machine to handle character codes streaming in. + * + * @see {@linkcode Effects} + * @see {@linkcode State} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Effects} effects - Context object to transition state machine + * @param {State} ok - Successful tokenization state + * @param {State} nok - Failed tokenization state + * @return {State} Initial state + */ +function tokenize( + this: TokenizeContext, + effects: Effects, + ok: State, + nok: State +): State { + return eof + + /** + * Tokenize end of file. 
+ * + * @param {Code} code - Current character code + * @return {State | undefined} Next state + */ + function eof(code: Code): State | undefined { + if (code !== codes.eof) return nok(code) + effects.enter(tt.eof) + effects.consume(code) + effects.exit(tt.eof) + return ok + } +} diff --git a/__tests__/constructs/html-flow.ts b/__tests__/constructs/html-flow.ts new file mode 100644 index 0000000..784b49b --- /dev/null +++ b/__tests__/constructs/html-flow.ts @@ -0,0 +1,38 @@ +/** + * @file Test Constructs - htmlFlow + * @module tests/constructs/htmlFlow + */ + +import { codes } from '#src/enums' +import type { Construct, TokenizeContext } from '#src/interfaces' +import type { Code, Resolver, Tokenizer } from '#src/types' +import * as micromark from 'micromark-core-commonmark' + +/** + * HTML flow construct. + * + * @const {Construct} htmlFlow + */ +const htmlFlow: Construct = { + name: micromark.htmlFlow.name, + resolveTo: micromark.htmlFlow.resolveTo as unknown as Resolver, + test, + tokenize: micromark.htmlFlow.tokenize as unknown as Tokenizer +} + +export default htmlFlow + +/** + * Check if the current character `code` can start this construct. + * + * @see {@linkcode Code} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Code} code - Current character code + * @return {boolean} `true` if `code` can start construct + */ +function test(this: TokenizeContext, code: Code): boolean { + return code === codes.lt +} diff --git a/__tests__/constructs/index.ts b/__tests__/constructs/index.ts index 496eb8e..07bfa39 100644 --- a/__tests__/constructs/index.ts +++ b/__tests__/constructs/index.ts @@ -3,8 +3,10 @@ * @module tests/constructs */ -export { default as inlineTag } from './inline-tag' -export { default as numeric } from './numeric' -export { default as punctuator } from './punctuator' -export { default as string } from './string' -export { default as ws } from './ws' +export { default as codeFenced } from './code-fenced' +export { default as codeText } from './code-text' +export { default as eof } from './eof' +export { default as htmlFlow } from './html-flow' +export { default as lineEnding } from './line-ending' +export { default as micromark } from './micromark' +export { default as typeMetadata } from './type-metadata' diff --git a/__tests__/constructs/inline-tag.ts b/__tests__/constructs/inline-tag.ts deleted file mode 100644 index df38b69..0000000 --- a/__tests__/constructs/inline-tag.ts +++ /dev/null @@ -1,186 +0,0 @@ -/** - * @file Test Constructs - inlineTag - * @module tests/constructs/inlineTag - */ - -import tk from '#fixtures/tk' -import type { Construct, TokenizeContext } from '#src/interfaces' -import type { Effects, Event, State } from '#src/types' -import { codes, type Code } from '@flex-development/vfile-reader' -import { ok as assert } from 'devlop' -import { asciiAlpha } from 'micromark-util-character' - -/** - * Inline tag construct. - * - * @const {Construct} inlineTag - */ -const inlineTag: Construct = { - /** - * Construct name. - */ - name: tk.inlineTag, - - /** - * Check if the previous character `code` can come before this construct. 
- * - * @see {@linkcode Code} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Code} code - Previous character code - * @return {boolean} `true` if `code` allowed before construct - */ - previous(this: TokenizeContext, code: Code): boolean { - return code !== codes.backslash - }, - - /** - * Resolve all events when the content is complete, from the start to the end. - * Only used if `tokenize`is successful once in the content. - * - * @see {@linkcode Construct.tokenize} - * @see {@linkcode Event} - * @see {@linkcode TokenizeContext} - * - * @param {Event[]} events - List of events - * @param {TokenizeContext} context - Tokenize context - * @return {Event[]} Changed events - */ - resolveAll(events: Event[], context: TokenizeContext): Event[] { - for (const [, token] of events) { - if (token.type === tk.inlineTag) { - assert(token.next, 'expected next token') - - if (token.next.type === tk.tag) { - /** - * Serialized token. - * - * @const {string} slice - */ - const slice: string = context.sliceSerialize(token) - - /** - * Next serialized token. - * - * @const {string} next - */ - const next: string = context.sliceSerialize(token.next) - - // @ts-expect-error custom field (2339) - token.tag = next - - // @ts-expect-error custom field (2339) - token.value = slice.slice(next.length + 1, -1).trimStart() - - if (token.next.next) token.next.next.previous = token - token.next = token.next.next - } - } - } - - return events - }, - - /** - * Check if the current character `code` can start this construct. - * - * @see {@linkcode Code} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Code} code - Current character code - * @return {boolean} `true` if `code` can start construct - */ - test(this: TokenizeContext, code: Code): boolean { - return code === codes.leftBrace - }, - - /** - * Set up a state machine to handle character codes streaming in. - * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @param {State} ok - Successful tokenization state - * @param {State} nok - Failed tokenization state - * @return {State} Initial state - */ - tokenize( - this: TokenizeContext, - effects: Effects, - ok: State, - nok: State - ): State { - /** - * Tokenize context. - * - * @const {TokenizeContext} self - */ - const self: TokenizeContext = this - - /** - * Closed tag name token? - * - * @var {boolean} name - */ - let name: boolean = false - - return inlineTag - - /** - * Finish inline tag tokenization. - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - function finish(code: Code): State | undefined { - if (code === codes.eof) return nok(code) - - if (!asciiAlpha(code) && !name) { - name = true - effects.exit(tk.tag) - } - - effects.consume(code) - - if (code === codes.rightBrace && self.previous !== codes.backslash) { - effects.exit(tk.inlineTag) - return ok - } - - return finish - } - - /** - * Tokenize the beginning of an inline tag name (`@`). - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - function begin(code: Code): State | undefined { - if (code !== codes.at) return nok(code) - return effects.enter(tk.tag), effects.consume(code), finish - } - - /** - * Start inline tag tokenization. 
- * - * @param {Code} code - Current character code - * @return {State} Next state - */ - function inlineTag(code: Code): State { - assert(code === codes.leftBrace, 'expected `{`') - effects.enter(tk.inlineTag) - return effects.consume(code), begin - } - } -} - -export default inlineTag diff --git a/__tests__/constructs/line-ending.ts b/__tests__/constructs/line-ending.ts new file mode 100644 index 0000000..37af9ff --- /dev/null +++ b/__tests__/constructs/line-ending.ts @@ -0,0 +1,24 @@ +/** + * @file Test Constructs - lineEnding + * @module tests/constructs/lineEnding + */ + +import type { Construct } from '#src/interfaces' +import type { Tokenizer } from '#src/types' +import { resolveSlice } from '#src/utils' +import * as micromark from 'micromark-core-commonmark' +import { markdownLineEnding } from 'micromark-util-character' + +/** + * Line ending construct. + * + * @const {Construct} lineEnding + */ +const lineEnding: Construct = { + name: micromark.lineEnding.name, + resolve: resolveSlice, + test: markdownLineEnding, + tokenize: micromark.lineEnding.tokenize as unknown as Tokenizer +} + +export default lineEnding diff --git a/__tests__/constructs/micromark.ts b/__tests__/constructs/micromark.ts new file mode 100644 index 0000000..c8844ae --- /dev/null +++ b/__tests__/constructs/micromark.ts @@ -0,0 +1,30 @@ +/** + * @file Test Constructs - micromark + * @module tests/constructs/micromark + */ + +import { codes } from '#src/enums' +import type { ConstructRecord } from '#src/interfaces' +import { codeIndented, hardBreakEscape } from 'micromark-core-commonmark' +import codeFenced from './code-fenced' +import codeText from './code-text' +import eof from './eof' +import htmlFlow from './html-flow' + +/** + * Markdown construct record. + * + * @const {ConstructRecord} micromark + */ +const micromark: ConstructRecord = { + [codes.lt]: htmlFlow, + [codes.vht]: codeIndented, + [codes.vs]: codeIndented, + [codes.space]: codeIndented, + [codes.graveAccent]: [codeFenced, codeText], + [codes.tilde]: codeFenced, + [codes.backslash]: hardBreakEscape, + null: [eof] +} as unknown as ConstructRecord + +export default micromark diff --git a/__tests__/constructs/numeric.ts b/__tests__/constructs/numeric.ts deleted file mode 100644 index e16b24a..0000000 --- a/__tests__/constructs/numeric.ts +++ /dev/null @@ -1,142 +0,0 @@ -/** - * @file Test Constructs - numeric - * @module tests/constructs/numeric - */ - -import tk from '#fixtures/tk' -import type { Construct, TokenizeContext } from '#src/interfaces' -import type { Effects, Event, Resolver, State } from '#src/types' -import { resolveSlice } from '#src/utils' -import { chars, codes, type Code } from '@flex-development/vfile-reader' -import { asciiDigit } from 'micromark-util-character' - -/** - * Numeric literal construct. - * - * @const {Construct} numeric - */ -const numeric: Construct = { - /** - * Construct name. - */ - name: 'numeric', - - /** - * Resolve the events parsed by `tokenize`. - * - * @see {@linkcode Resolver} - */ - resolve: resolveSlice, - - /** - * Resolve the events from the start of the content (which may include other - * constructs) to the last one parsed by `tokenize`. 
- * - * @see {@linkcode Construct.tokenize} - * @see {@linkcode Event} - * - * @param {Event[]} events - List of events - * @return {Event[]} Changed events - */ - resolveTo(events: Event[]): Event[] { - for (const [, token] of events) { - if (token.type !== tk.number) continue - - // @ts-expect-error custom field (2339) - if ((token.value)?.endsWith(chars.lowercaseN)) { - token.type = tk.bigint - } - } - - return events - }, - - /** - * Check if the current character `code` can start this construct. - * - * @see {@linkcode Code} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Code} code - Current character code - * @return {boolean} `true` if `code` can start construct - */ - test(this: TokenizeContext, code: Code): boolean { - return asciiDigit(code) || (code === codes.dot && asciiDigit(this.peek())) - }, - - /** - * Set up a state machine to handle character codes streaming in. - * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @param {State} ok - Successful tokenization state - * @return {State} Initial state - */ - tokenize(this: TokenizeContext, effects: Effects, ok: State): State { - /** - * Boolean indicating floating point number. - * - * @var {boolean} float - */ - let float: boolean = this.code === codes.dot - - /** - * Finish tokenizing an integer or float. - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - const number = (code: Code): State | undefined => { - switch (true) { - case asciiDigit(code): - case code === codes.underscore: - case !float && code === codes.dot: - return float = code === codes.dot, effects.consume(code), number - case !float && code === codes.lowercaseN: - return effects.consume(code), effects.exit(tk.number), ok - default: - return effects.exit(tk.number), ok(code) - } - } - - /** - * Finish numeric literal tokenization. - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - const finish = (code: Code): State | undefined => { - switch (true) { - // integer or float - case asciiDigit(code): - case code === codes.underscore: - case !float && code === codes.dot: - return float = code === codes.dot, effects.consume(code), number - // bigint - case !float && code === codes.lowercaseN: - return effects.consume(code), effects.exit(tk.number), ok - default: - return effects.exit(tk.number), ok(code) - } - } - - /** - * Start numeric literal tokenization. - * - * @param {Code} code - Current character code - * @return {State} Next state - */ - return function numeric(code: Code): State { - return effects.enter(tk.number), effects.consume(code), finish - } - } -} - -export default numeric diff --git a/__tests__/constructs/punctuator.ts b/__tests__/constructs/punctuator.ts deleted file mode 100644 index eba6466..0000000 --- a/__tests__/constructs/punctuator.ts +++ /dev/null @@ -1,69 +0,0 @@ -/** - * @file Test Constructs - punctuator - * @module tests/constructs/punctuator - */ - -import tk from '#fixtures/tk' -import type { Construct, TokenizeContext } from '#src/interfaces' -import type { Effects, Guard, Resolver, State } from '#src/types' -import { resolveSlice } from '#src/utils' -import type { Code } from '@flex-development/vfile-reader' -import { unicodePunctuation as test } from 'micromark-util-character' - -/** - * Punctuator construct. 
- * - * @const {Construct} punctuator - */ -const punctuator: Construct = { - /** - * Construct name. - */ - name: tk.punctuator, - - /** - * Resolve the events parsed by `tokenize`. - * - * @see {@linkcode Resolver} - */ - resolve: resolveSlice, - - /** - * Check if the current character code can start this construct. - * - * @see {@linkcode Guard} - */ - test, - - /** - * Set up a state machine to handle character codes streaming in. - * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @param {State} ok - Successful tokenization state - * @return {State} Initial state - */ - tokenize(this: TokenizeContext, effects: Effects, ok: State): State { - return punctuator - - /** - * Tokenize a punctuator. - * - * @param {Code} code - Current character code - * @return {State} Next state - */ - function punctuator(code: Code): State { - effects.enter(tk.punctuator) - effects.consume(code) - effects.exit(tk.punctuator) - return ok - } - } -} - -export default punctuator diff --git a/__tests__/constructs/string.ts b/__tests__/constructs/string.ts deleted file mode 100644 index 2f6548d..0000000 --- a/__tests__/constructs/string.ts +++ /dev/null @@ -1,135 +0,0 @@ -/** - * @file Test Constructs - string - * @module tests/constructs/string - */ - -import tk from '#fixtures/tk' -import type { Construct, TokenizeContext } from '#src/interfaces' -import type { Effects, Event, Guard, State } from '#src/types' -import { codes, type Code } from '@flex-development/vfile-reader' -import { ok as assert } from 'devlop' - -/** - * Check if the current character `code` can start this construct. - * - * @see {@linkcode Code} - * - * @this {TokenizeContext} - * - * @param {Code} code - Current character code - * @return {boolean} `true` if `code` can start construct - */ -function test(this: TokenizeContext, code: Code): boolean { - return code === codes.apostrophe || code === codes.quotation -} - -/** - * String literal construct. - * - * @const {Construct} string - */ -const string: Construct = { - /** - * Construct name. - */ - name: tk.string, - - /** - * Check if the previous character `code` can come before this construct. - * - * @see {@linkcode Code} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Code} code - Previous character code - * @return {boolean} `true` if `code` allowed before construct - */ - previous(this: TokenizeContext, code: Code): boolean { - return code !== codes.backslash - }, - - /** - * Resolve the events parsed by `tokenize`. - * - * @see {@linkcode Construct.tokenize} - * @see {@linkcode Event} - * @see {@linkcode TokenizeContext} - * - * @param {Event[]} events - List of events - * @param {TokenizeContext} context - Tokenize context - * @return {Event[]} Changed events - */ - resolve(events: Event[], context: TokenizeContext): Event[] { - assert(events.length === 2, 'expected events') - const [[, token]] = <[Event]>events - - assert(token.type === tk.string, 'expected string token') - // @ts-expect-error custom field (2339) - token.value = context.sliceSerialize(token) - - return events - }, - - /** - * Check if the current character code can start this construct. - * - * @see {@linkcode Guard} - */ - test, - - /** - * Set up a state machine to handle character codes streaming in. 
- * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @param {State} ok - Successful tokenization state - * @return {State} Initial state - */ - tokenize(this: TokenizeContext, effects: Effects, ok: State): State { - /** - * Tokenize context. - * - * @const {TokenizeContext} self - */ - const self: TokenizeContext = this - - return string - - /** - * Finish string literal tokenization. - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - function finish(code: Code): State | undefined { - switch (true) { - case code === codes.eof: - return effects.exit(tk.string), ok(code) - default: - effects.consume(code) - if (test.call(self, code)) return effects.exit(tk.string), ok - return finish - } - } - - /** - * Start string literal tokenization. - * - * @param {Code} code - Current character code - * @return {State} Next state - */ - function string(code: Code): State { - effects.enter(tk.string) - effects.consume(code) - return finish - } - } -} - -export default string diff --git a/__tests__/constructs/type-metadata.ts b/__tests__/constructs/type-metadata.ts new file mode 100644 index 0000000..d716d45 --- /dev/null +++ b/__tests__/constructs/type-metadata.ts @@ -0,0 +1,127 @@ +/** + * @file Test Constructs - typeMetadata + * @module tests/constructs/typeMetadata + */ + +import tk from '#fixtures/tt' +import { codes } from '#src/enums' +import type { Construct, Effects, TokenizeContext } from '#src/interfaces' +import type { Code, State } from '#src/types' +import { ok as assert } from 'devlop' + +/** + * Type metadata construct. + * + * @const {Construct} typeMetadata + */ +const typeMetadata: Construct = { + name: tk.typeMetadata, + partial: true, + previous, + test, + tokenize +} + +export default typeMetadata + +/** + * Check if the previous character `code` can come before this construct. + * + * @see {@linkcode Code} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Code} code - Previous character code + * @return {boolean} `true` if `code` allowed before construct + */ +function previous(this: TokenizeContext, code: Code): boolean { + return code !== codes.backslash +} + +/** + * Check if the current character `code` can start this construct. + * + * @see {@linkcode Code} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Code} code - Current character code + * @return {boolean} `true` if `code` can start construct + */ +function test(this: TokenizeContext, code: Code): boolean { + return code === codes.leftBrace +} + +/** + * Set up a state machine to handle character codes streaming in. + * + * @see {@linkcode Effects} + * @see {@linkcode State} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Effects} effects - Context object to transition state machine + * @param {State} ok - Successful tokenization state + * @param {State} nok - Failed tokenization state + * @return {State} Initial state + */ +function tokenize( + this: TokenizeContext, + effects: Effects, + ok: State, + nok: State +): State { + /** + * Tokenize context. + * + * @const {TokenizeContext} self + */ + const self: TokenizeContext = this + + return typeMetadata + + /** + * Inside type metadata. 
+ * + * @example + * ```markdown + * > {{ id: string }} + * ^^^^^^^^^^^^^ + * ``` + * + * @param {Code} code - Current character code + * @return {State | undefined} Next state + */ + function inside(code: Code): State | undefined { + if (code === codes.eof) return nok(code) + + if (code === codes.rightBrace && self.next !== code) { + effects.consume(code) + effects.exit(tk.typeMetadata) + return ok + } + + effects.consume(code) + return inside + } + + /** + * At start of type metadata. + * + * @example + * ```markdown + * > {{ id: string }} + * ^ + * ``` + * + * @param {Code} code - Current character code + * @return {State | undefined} Next state + */ + function typeMetadata(code: Code): State | undefined { + assert(code === codes.leftBrace, 'expected `{`') + return effects.enter(tk.typeMetadata), effects.consume(code), inside + } +} diff --git a/__tests__/constructs/ws.ts b/__tests__/constructs/ws.ts deleted file mode 100644 index ca7de52..0000000 --- a/__tests__/constructs/ws.ts +++ /dev/null @@ -1,93 +0,0 @@ -/** - * @file Test Constructs - ws - * @module tests/constructs/ws - */ - -import tk from '#fixtures/tk' -import type { Construct, TokenizeContext } from '#src/interfaces' -import type { Effects, Resolver, State } from '#src/types' -import { resolveSlice } from '#src/utils' -import type { Code, CodeCheck } from '@flex-development/vfile-reader' -import { ok as assert } from 'devlop' - -/** - * Code check. - * - * @var {CodeCheck} check - */ -let check: CodeCheck - -/** - * Whitespace construct. - * - * @const {Construct} ws - */ -const ws: Construct = { - /** - * Construct name. - */ - name: tk.whitespace, - - /** - * Resolve the events parsed by `tokenize`. - * - * @see {@linkcode Resolver} - */ - resolve: resolveSlice, - - /** - * Check if the current character `code` can start this construct. - * - * @see {@linkcode Code} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Code} code - Current character code - * @return {boolean} `true` if `code` can start construct - */ - test(this: TokenizeContext, code: Code): boolean { - return (check = this.check(/\s/))(code) - }, - - /** - * Set up a state machine to handle character codes streaming in. - * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @param {State} ok - Successful tokenization state - * @return {State} Initial state - */ - tokenize(this: TokenizeContext, effects: Effects, ok: State): State { - return whitespace - - /** - * Finish tokenizing whitespace. - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - function finish(code: Code): State | undefined { - assert(check, 'expected code check') - if (!check(code)) return effects.exit(tk.whitespace), ok(code) - return effects.consume(code), finish - } - - /** - * Start whitespace tokenization.
- * - * @param {Code} code - Current character code - * @return {State} Next state - */ - function whitespace(code: Code): State { - return effects.enter(tk.whitespace), effects.consume(code), finish - } - } -} - -export default ws diff --git a/__tests__/interfaces/index.ts b/__tests__/interfaces/index.ts deleted file mode 100644 index 06686f3..0000000 --- a/__tests__/interfaces/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * @file Entry Point - Test Interfaces - * @module tests/interfaces - */ - -export type { default as MockInstance } from './mock-instance' diff --git a/__tests__/interfaces/mock-instance.ts b/__tests__/interfaces/mock-instance.ts deleted file mode 100644 index 132a1e3..0000000 --- a/__tests__/interfaces/mock-instance.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * @file Test Interfaces - MockInstance - * @module tests/interfaces/MockInstance - */ - -import type { Fn } from '@flex-development/tutils' -import type * as vitest from 'vitest' - -/** - * {@linkcode vitest.MockInstance} utility. - * - * @template F - Function being mocked - * - * @extends {vitest.Mock<Parameters<F>,ReturnType<F>>} - */ -interface MockInstance<F extends Fn> - extends vitest.MockInstance<Parameters<F>, ReturnType<F>> {} - -export type { MockInstance as default } diff --git a/__tests__/utils/finalize-context.ts b/__tests__/utils/finalize-context.ts new file mode 100644 index 0000000..fa6146e --- /dev/null +++ b/__tests__/utils/finalize-context.ts @@ -0,0 +1,35 @@ +/** + * @file Test Utilities - finalizeContext + * @module tests/utils/finalizeContext + */ + +import type { TokenizeContext } from '#src/interfaces' +import type Lexer from '#src/lexer' + +/** + * Finalize the tokenize `context`. + * + * @see {@linkcode Lexer} + * @see {@linkcode TokenizeContext} + * + * @this {Lexer} + * + * @param {TokenizeContext} context - Base tokenize context + * @return {undefined} Nothing + */ +function finalizeContext(this: Lexer, context: TokenizeContext): undefined { + // @ts-expect-error this is a custom field, which users are supposed to + // manually type, but the runtime should just support it (2339). + context.parser = { + constructs: { disable: { null: [] } }, + defined: [], + lazy: {} + } + + // @ts-expect-error custom field, same as above (2339). + this.place._bufferIndex = -1 + + return void context +} + +export default finalizeContext diff --git a/__tests__/utils/inspect.ts b/__tests__/utils/inspect.ts index 97636ba..eb36612 100644 --- a/__tests__/utils/inspect.ts +++ b/__tests__/utils/inspect.ts @@ -6,63 +6,27 @@ import type { Token } from '#src/interfaces' import { omit } from '@flex-development/tutils' import { u } from '@flex-development/unist-util-builder' -import * as i from '@flex-development/unist-util-inspect' -import type { Literal, Node } from 'unist' +import { inspectNoColor } from '@flex-development/unist-util-inspect' +import list from './list' export default inspect /** * Inspect a token list. * - * @see {@linkcode i.Options} * @see {@linkcode Token} * * @param {Token} token - Head token - * @param {(i.Options | null)?} [options] - Configuration options * @return {string} Pretty printed token list */ -function inspect(token: Token, options?: i.Options | null): string { - return i.inspectNoColor(u('tokens', nodes(token)), options) -} - -/** - * Convert a token to a list of nodes. - * - * @internal - * - * @param {Token} token - Head token - * @return {(Literal | Node)[]} Node list - */ -function nodes(token: Token): (Literal | Node)[] { - /** - * Node list.
- * - * @const {(Literal | Node)[]} list - */ - const list: (Literal | Node)[] = [] - - /** - * Current token. - * - * @var {Token | undefined} tok - */ - let tok: Token | undefined = token - - // build list - while (tok) { - /** - * New node. - * - * @const {Literal | Node} node - */ - const node: Literal | Node = u(tok.type, { - ...omit(tok, ['end', 'next', 'previous', 'start']), - position: { end: tok.end, start: tok.start } +function inspect(token: Token): string { + return inspectNoColor(u('tokens', list(token).map(token => { + return u(token.type, { + ...omit(Object.assign({}, token), ['end', 'next', 'previous', 'start']), + position: { + end: Object.assign({}, token.end), + start: Object.assign({}, token.start) + } }) - - list.push(node) - tok = tok.next - } - - return list + }))) } diff --git a/__tests__/utils/is-point.ts b/__tests__/utils/is-point.ts index fc75dbf..fcfc327 100644 --- a/__tests__/utils/is-point.ts +++ b/__tests__/utils/is-point.ts @@ -3,7 +3,7 @@ * @module tests/utils/isPoint */ -import type { Point } from '@flex-development/vfile-reader' +import type { Point } from '@flex-development/vfile-location' /** * Check if the specified `value` is a point. diff --git a/__tests__/utils/list.ts b/__tests__/utils/list.ts new file mode 100644 index 0000000..a03799c --- /dev/null +++ b/__tests__/utils/list.ts @@ -0,0 +1,47 @@ +/** + * @file Test Utilities - list + * @module tests/utils/list + */ + +import { ev } from '#src/enums' +import type { Token } from '#src/interfaces' +import type { Event } from '#src/types' +import { isArray } from '@flex-development/tutils' + +/** + * Convert a list of events or a linked token list to a flat token list. + * + * @see {@linkcode Event} + * + * @template {Event[] | Token} T - Event list or head token + * + * @param {Event[] | Token | undefined} data - Event list or head token + * @return {Token[]} Flat token list + */ +function list<T extends Event[] | Token>(data: T | undefined): Token[] { + /** + * Flat token list. + * + * @const {Token[]} tokens + */ + const tokens: Token[] = [] + + if (isArray(data)) { + for (const [event, token] of data) { + if (event === ev.enter) tokens.push(token) + } + } else { + /** + * Current token. + * + * @var {Token | undefined} token + */ + let token: Token | undefined = data + + while (token) void (tokens.push(token), token = token.next) + } + + return tokens +} + +export default list diff --git a/__tests__/utils/token.ts b/__tests__/utils/token.ts deleted file mode 100644 index f2eb28d..0000000 --- a/__tests__/utils/token.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * @file Test Utilities - token - * @module tests/utils/token - */ - -import type { Token } from '#src/interfaces' -import type { TokenFields, TokenType } from '#src/types' - -/** - * Token factory.
- * - * @see {@linkcode TokenFields} - * @see {@linkcode TokenType} - * @see {@linkcode Token} - * - * @param {TokenType} type - Token type - * @param {TokenFields} fields - Token fields - * @return {Token} New token - */ -function token(type: TokenType, fields: TokenFields): Token { - return Object.defineProperties({ - end: fields.end, - start: fields.start, - type - }, { - next: { enumerable: false, writable: true }, - previous: { enumerable: false, writable: true } - }) -} - -export default token diff --git a/eslint.base.config.mjs b/eslint.base.config.mjs index 7201305..dd1c9e7 100644 --- a/eslint.base.config.mjs +++ b/eslint.base.config.mjs @@ -1043,7 +1043,8 @@ export default [ 'unicorn/no-useless-undefined': 0, 'unicorn/prefer-at': 0, 'unicorn/prefer-dom-node-append': 0, - 'unicorn/string-content': 0 + 'unicorn/string-content': 0, + 'unicorn/text-encoding-identifier-case': 0 } }, { diff --git a/eslint.config.mjs b/eslint.config.mjs index 8b2bb78..c278336 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -18,19 +18,25 @@ export default [ '!**/typings/**/dist/', '**/.yarn/', '**/coverage/', - '**/dist/', - '__fixtures__/underscore-1.5.2.js' + '**/dist/' ] }, { - files: [ - '__tests__/constructs/inline-tag.ts', - '__tests__/constructs/string.ts', - 'src/constructs/initialize.ts', - 'src/lexer.ts' - ], + files: ['__tests__/constructs/type-metadata.ts', 'src/lexer.ts'], rules: { 'unicorn/no-this-assignment': 0 } + }, + { + files: ['src/enums/codes.ts'], + rules: { + 'sort-keys': 0 + } + }, + { + files: ['src/types/encoding.ts'], + rules: { + 'unicorn/text-encoding-identifier-case': 0 + } } ] diff --git a/package.json b/package.json index 539bc8b..85246b4 100644 --- a/package.json +++ b/package.json @@ -73,21 +73,20 @@ "typecheck:watch": "vitest --mode=typecheck --typecheck" }, "dependencies": { + "@flex-development/unist-util-builder": "2.0.0", "@flex-development/unist-util-types": "1.6.1", - "@flex-development/vfile-reader": "3.1.2", + "@flex-development/vfile-location": "1.1.0", "@types/debug": "4.1.12", - "@types/unist": "3.0.2", "debug": "4.3.5", "devlop": "1.1.0", - "micromark-util-chunked": "2.0.0", - "vfile": "6.0.1" + "micromark-util-chunked": "2.0.0" }, "devDependencies": { "@arethetypeswrong/cli": "0.15.3", "@commitlint/cli": "19.3.0", "@commitlint/types": "19.0.3", "@eslint/js": "9.5.0", - "@faker-js/faker": "9.0.0-alpha.0", + "@faker-js/faker": "9.0.0-rc.0", "@flex-development/commitlint-config": "1.0.1", "@flex-development/decorator-regex": "2.0.0", "@flex-development/esm-types": "2.0.0", @@ -96,18 +95,18 @@ "@flex-development/mlly": "1.0.0-alpha.18", "@flex-development/pathe": "2.0.0", "@flex-development/tutils": "6.0.0-alpha.25", - "@flex-development/unist-util-builder": "2.0.0", - "@flex-development/unist-util-inspect": "1.0.0", + "@flex-development/unist-util-inspect": "1.0.1", "@stylistic/eslint-plugin": "2.2.2", "@types/chai": "4.3.16", "@types/eslint": "8.56.10", "@types/eslint__js": "8.42.3", "@types/is-ci": "3.0.4", - "@types/node": "20.14.6", + "@types/node": "20.14.11", "@types/node-notifier": "8.0.5", + "@types/unist": "3.0.2", "@vates/toggle-scripts": "1.0.0", - "@vitest/coverage-v8": "2.0.0-beta.11", - "@vitest/ui": "2.0.0-beta.11", + "@vitest/coverage-v8": "2.0.4", + "@vitest/ui": "2.0.4", "chai": "5.1.1", "convert-hrtime": "5.0.0", "cross-env": "7.0.3", @@ -135,6 +134,7 @@ "is-ci": "3.0.1", "jsonc-eslint-parser": "2.4.0", "lint-staged": "15.2.7", + "micromark-core-commonmark": "2.0.1", "micromark-util-character": "2.1.0", "node-notifier": "10.0.1", 
"prettier": "3.3.2", @@ -148,8 +148,9 @@ "ts-dedent": "2.2.0", "typescript": "5.5.2", "typescript-eslint": "8.0.0-alpha.30", + "vfile": "6.0.2", "vite-tsconfig-paths": "4.3.2", - "vitest": "2.0.0-beta.11", + "vitest": "2.0.4", "yaml-eslint-parser": "1.2.3" }, "resolutions": { diff --git a/src/__snapshots__/tokenize.integration.snap b/src/__snapshots__/tokenize.integration.snap index 3f999ce..bbb2c40 100644 --- a/src/__snapshots__/tokenize.integration.snap +++ b/src/__snapshots__/tokenize.integration.snap @@ -1,102 +1,114 @@ // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`integration:tokenize > default constructs > should tokenize empty file 1`] = ` -tokens[2] -├─0 sof (1:1-1:1, 0-0) -└─1 eof (1:1-1:1, 0-0) -`; - -exports[`integration:tokenize > default constructs > should tokenize non-empty file 1`] = ` -tokens[2] -├─0 sof (1:1-1:1, 0-0) -└─1 eof (22:1-22:1, 304-304) -`; - -exports[`integration:tokenize > user constructs > non-empty file > sample 0 1`] = ` +exports[`integration:tokenize > should work with constructs (0) 1`] = ` tokens[3] -├─0 sof (1:1-1:1, 0-0) -├─1 inlineTag "Code" (1:1-1:17, 0-16) -│ tag: "@linkcode" -└─2 eof (2:1-2:1, 17-17) +├─0 lineEnding "\\n" (1:1-2:1, 0-1) +├─1 lineEnding "\\r" (2:1-3:1, 1-2) +└─2 lineEnding "\\r\\n" (3:1-4:1, 2-4) `; -exports[`integration:tokenize > user constructs > non-empty file > sample 1 1`] = ` -tokens[13] -├─0 sof (1:1-1:1, 0-0) -├─1 punctuator "=" (1:10-1:11, 9-10) -├─2 string "\\"hello 👋\\"" (1:12-1:21, 11-20) -├─3 punctuator ";" (1:21-1:22, 20-21) -├─4 punctuator "." (2:8-2:9, 29-30) -├─5 punctuator "(" (2:12-2:13, 33-34) -├─6 punctuator "\\\\" (2:13-2:14, 34-35) -├─7 punctuator "\\\\" (2:19-2:20, 40-41) -├─8 punctuator ")" (2:25-2:26, 46-47) -├─9 punctuator ";" (2:26-2:27, 47-48) -├─10 punctuator "/" (2:28-2:29, 49-50) -├─11 punctuator "/" (2:29-2:30, 50-51) -└─12 eof (3:1-3:1, 60-60) +exports[`integration:tokenize > should work with constructs (1) 1`] = ` +tokens[1] +└─0 typeMetadata "{{ id: string }}" (1:1-1:17, 0-16) `; -exports[`integration:tokenize > user constructs > non-empty file > sample 2 1`] = ` -tokens[8] -├─0 sof (1:1-1:1, 0-0) -├─1 string "'😍'" (1:1-1:4, 0-3) -├─2 string "\\"👍\\"" (2:1-2:4, 4-7) -├─3 punctuator "\\\\" (3:1-3:2, 8-9) -├─4 punctuator "'" (3:2-3:3, 9-10) -├─5 punctuator "\\\\" (3:4-3:5, 11-12) -├─6 punctuator "'" (3:5-3:6, 12-13) -└─7 eof (4:1-4:1, 14-14) +exports[`integration:tokenize > should work with constructs (2) 1`] = ` +tokens[21] +├─0 codeIndented " indented code\\n\\n\\n more indented code" (1:1-4:23, 0-42) +├─1 linePrefix " " (1:1-1:5, 0-4) +├─2 codeFlowValue "indented code" (1:5-1:18, 4-17) +├─3 lineEnding "\\n" (1:18-2:1, 17-18) +├─4 lineEnding "\\n" (2:1-3:1, 18-19) +├─5 lineEnding "\\n" (3:1-4:1, 19-20) +├─6 linePrefix " " (4:1-4:5, 20-24) +├─7 codeFlowValue "more indented code" (4:5-4:23, 24-42) +├─8 codeIndented " more\\n indent" (7:1-8:14, 62-86) +├─9 linePrefix " " (7:1-7:5, 62-66) +├─10 codeFlowValue " more" (7:5-7:11, 66-72) +├─11 lineEnding "\\n" (7:11-8:1, 72-73) +├─12 linePrefix " " (8:1-8:5, 73-77) +├─13 codeFlowValue " indent" (8:5-8:14, 77-86) +├─14 codeIndented " \\tand mixed with spaces\\n\\t extra spaces" (13:1-14:17, 105-145) +├─15 linePrefix " \\t" (13:1-13:5, 105-108) +├─16 codeFlowValue "and mixed with spaces" (13:5-13:26, 108-129) +├─17 lineEnding "\\n" (13:26-14:1, 129-130) +├─18 linePrefix "\\t " (14:1-14:5, 130-133) +├─19 codeFlowValue "extra spaces" (14:5-14:17, 133-145) +└─20 eof (19:1-19:1, 165-165) `; -exports[`integration:tokenize > user 
constructs > non-empty file > sample 3 1`] = ` -tokens[42] -├─0 sof (1:1-1:1, 0-0) -├─1 number "0" (1:1-1:2, 0-1) -├─2 whitespace "\\n" (1:2-2:1, 1-2) -├─3 bigint "0n" (2:1-2:3, 2-4) -├─4 whitespace "\\n" (2:3-3:1, 4-5) -├─5 number "1" (3:1-3:2, 5-6) -├─6 whitespace "\\n" (3:2-4:1, 6-7) -├─7 bigint "1n" (4:1-4:3, 7-9) -├─8 whitespace "\\n" (4:3-5:1, 9-10) -├─9 number "2" (5:1-5:2, 10-11) -├─10 whitespace "\\n" (5:2-6:1, 11-12) -├─11 bigint "2n" (6:1-6:3, 12-14) -├─12 whitespace "\\n" (6:3-7:1, 14-15) -├─13 number "3" (7:1-7:2, 15-16) -├─14 whitespace "\\n" (7:2-8:1, 16-17) -├─15 bigint "3n" (8:1-8:3, 17-19) -├─16 whitespace "\\n" (8:3-9:1, 19-20) -├─17 number "4" (9:1-9:2, 20-21) -├─18 whitespace "\\n" (9:2-10:1, 21-22) -├─19 bigint "4n" (10:1-10:3, 22-24) -├─20 whitespace "\\n" (10:3-11:1, 24-25) -├─21 number "5" (11:1-11:2, 25-26) -├─22 whitespace "\\n" (11:2-12:1, 26-27) -├─23 bigint "5n" (12:1-12:3, 27-29) -├─24 whitespace "\\n" (12:3-13:1, 29-30) -├─25 number "6" (13:1-13:2, 30-31) -├─26 whitespace "\\n" (13:2-14:1, 31-32) -├─27 bigint "6n" (14:1-14:3, 32-34) -├─28 whitespace "\\n" (14:3-15:1, 34-35) -├─29 number "7" (15:1-15:2, 35-36) -├─30 whitespace "\\n" (15:2-16:1, 36-37) -├─31 bigint "7n" (16:1-16:3, 37-39) -├─32 whitespace "\\n" (16:3-17:1, 39-40) -├─33 number "8" (17:1-17:2, 40-41) -├─34 whitespace "\\n" (17:2-18:1, 41-42) -├─35 bigint "8n" (18:1-18:3, 42-44) -├─36 whitespace "\\n" (18:3-19:1, 44-45) -├─37 number "9" (19:1-19:2, 45-46) -├─38 whitespace "\\n" (19:2-20:1, 46-47) -├─39 bigint "9n" (20:1-20:3, 47-49) -├─40 whitespace "\\n" (20:3-21:1, 49-50) -└─41 eof (21:1-21:1, 50-50) +exports[`integration:tokenize > should work with constructs (3) 1`] = ` +tokens[47] +├─0 codeText "\`a\`" (1:28-1:31, 27-30) +├─1 codeTextSequence "\`" (1:28-1:29, 27-28) +├─2 codeTextData "a" (1:29-1:30, 28-29) +├─3 codeTextSequence "\`" (1:30-1:31, 29-30) +├─4 codeText "\` b\`" (1:33-1:37, 32-36) +├─5 codeTextSequence "\`" (1:33-1:34, 32-33) +├─6 codeTextData " b" (1:34-1:36, 33-35) +├─7 codeTextSequence "\`" (1:36-1:37, 35-36) +├─8 codeText "\`c \`" (1:39-1:43, 38-42) +├─9 codeTextSequence "\`" (1:39-1:40, 38-39) +├─10 codeTextData "c " (1:40-1:42, 39-41) +├─11 codeTextSequence "\`" (1:42-1:43, 41-42) +├─12 codeText "\` d \`" (1:45-1:50, 44-49) +├─13 codeTextSequence "\`" (1:45-1:46, 44-45) +├─14 codeTextPadding " " (1:46-1:47, 45-46) +├─15 codeTextData "d" (1:47-1:48, 46-47) +├─16 codeTextPadding " " (1:48-1:49, 47-48) +├─17 codeTextSequence "\`" (1:49-1:50, 48-49) +├─18 codeText "\` e\\n\`" (1:52-2:2, 51-56) +├─19 codeTextSequence "\`" (1:52-1:53, 51-52) +├─20 codeTextPadding " " (1:53-1:54, 52-53) +├─21 codeTextData "e" (1:54-1:55, 53-54) +├─22 codeTextPadding "\\n" (1:55-2:1, 54-55) +├─23 codeTextSequence "\`" (2:1-2:2, 55-56) +├─24 codeText "\` f\\n\`" (2:4-3:2, 58-63) +├─25 codeTextSequence "\`" (2:4-2:5, 58-59) +├─26 codeTextPadding " " (2:5-2:6, 59-60) +├─27 codeTextData "f" (2:6-2:7, 60-61) +├─28 codeTextPadding "\\n" (2:7-3:1, 61-62) +├─29 codeTextSequence "\`" (3:1-3:2, 62-63) +├─30 codeText "\`g\\n\`" (3:4-4:2, 65-69) +├─31 codeTextSequence "\`" (3:4-3:5, 65-66) +├─32 codeTextData "g" (3:5-3:6, 66-67) +├─33 lineEnding "\\n" (3:6-4:1, 67-68) +├─34 codeTextSequence "\`" (4:1-4:2, 68-69) +├─35 codeText "\`\\nh\`" (4:4-5:3, 71-75) +├─36 codeTextSequence "\`" (4:4-4:5, 71-72) +├─37 lineEnding "\\n" (4:5-5:1, 72-73) +├─38 codeTextData "h" (5:1-5:2, 73-74) +├─39 codeTextSequence "\`" (5:2-5:3, 74-75) +├─40 codeText "\`alpha bravo charlie\\n\\tdelta echo\`" (7:6-8:14, 83-116) +├─41 
codeTextSequence "\`" (7:6-7:7, 83-84) +├─42 codeTextData "alpha bravo charlie" (7:7-7:26, 84-103) +├─43 lineEnding "\\n" (7:26-8:1, 103-104) +├─44 codeTextData "\\tdelta echo" (8:1-8:13, 104-115) +├─45 codeTextSequence "\`" (8:13-8:14, 115-116) +└─46 eof (9:1-9:1, 118-118) `; -exports[`integration:tokenize > user constructs > should tokenize empty file 1`] = ` -tokens[2] -├─0 sof (1:1-1:1, 0-0) -└─1 eof (1:1-1:1, 0-0) +exports[`integration:tokenize > should work with constructs (4) 1`] = ` +tokens[21] +├─0 htmlFlow "" (1:1-3:10, 0-20) +├─1 htmlFlowData "" (3:1-3:10, 11-20) +├─6 htmlFlow "" (5:1-5:9, 22-30) +├─7 htmlFlowData "" (5:1-5:9, 22-30) +├─8 htmlFlow "" (7:1-7:6, 32-37) +├─9 htmlFlowData "" (7:1-7:6, 32-37) +├─10 htmlFlow "" (9:1-9:5, 39-43) +├─11 htmlFlowData "" (9:1-9:5, 39-43) +├─12 htmlFlow "" (11:1-11:14, 45-58) +├─13 htmlFlowData "" (11:1-11:14, 45-58) +├─14 htmlFlow "\\n" (16:1-17:1, 68-72) +├─18 htmlFlowData "" (16:1-16:4, 68-71) +├─19 lineEnding "\\n" (16:4-17:1, 71-72) +└─20 eof (17:1-17:1, 72-72) `; diff --git a/src/__tests__/index.e2e.spec.ts b/src/__tests__/index.e2e.spec.ts index bd48ffa..23090dd 100644 --- a/src/__tests__/index.e2e.spec.ts +++ b/src/__tests__/index.e2e.spec.ts @@ -11,11 +11,13 @@ describe('e2e:vfile-lexer', () => { 'Lexer', 'chars', 'codes', - 'eof', 'ev', 'initialize', + 'isLineEnding', + 'preprocess', 'resolveAll', 'resolveSlice', + 'resolveTokenList', 'tokenize' ]) }) diff --git a/src/__tests__/tokenize.integration.spec.ts b/src/__tests__/tokenize.integration.spec.ts index 7827b81..5e95d98 100644 --- a/src/__tests__/tokenize.integration.spec.ts +++ b/src/__tests__/tokenize.integration.spec.ts @@ -3,74 +3,79 @@ * @module vfile-lexer/tests/integration/tokenize */ -import tk from '#fixtures/tk' +import tt from '#fixtures/tt' import { initialize } from '#src/constructs' -import type { Options } from '#src/interfaces' -import { inlineTag, numeric, punctuator, string, ws } from '#tests/constructs' -import token from '#tests/utils/token' -import { identity } from '@flex-development/tutils' -import { codes } from '@flex-development/vfile-reader' +import { chars } from '#src/enums' +import type { + Encoding, + FileLike, + TokenizeOptions, + Value +} from '#src/types' +import { resolveSlice, resolveTokenList } from '#src/utils' +import { + codeFenced, + codeText, + eof, + lineEnding, + micromark, + typeMetadata +} from '#tests/constructs' +import finalizeContext from '#tests/utils/finalize-context' +import list from '#tests/utils/list' import { readSync as read } from 'to-vfile' -import type { VFile, Value } from 'vfile' -import type Lexer from '../lexer' import testSubject from '../tokenize' describe('integration:tokenize', () => { - let run: (file: Value | VFile, options?: Partial | null) => Lexer + it.each<[ + value?: FileLike | Value | null | undefined, + options?: TokenizeOptions | null | undefined + ]>([ + [], + [read('__fixtures__/markdown/empty.md')], + [read('__fixtures__/markdown/code-fenced.md'), { tabSize: 2 }] + ])('should work without constructs (%#)', (value, options) => { + // Act + const result = testSubject(value, options) - beforeAll(() => { - run = (file, options) => testSubject(file, { ...options, token }) + // Expect + expect(result).to.be.an('array').that.is.empty + expect(list(result[0]?.[1])).to.have.ordered.members(list(result)) }) - describe('default constructs', () => { - it('should tokenize empty file', () => { - expect(run('', { constructs: [ws] }).head).toMatchSnapshot() - }) - - it('should tokenize non-empty file', () => { - 
expect(run(read('__fixtures__/tk.ts')).head).toMatchSnapshot() - }) - }) + it.each<[ + value: FileLike | Value, + encoding: Encoding | null | undefined, + options: TokenizeOptions + ]>([ + [chars.lf + chars.cr + chars.crlf, null, { constructs: lineEnding }], + [read('__fixtures__/type-metadata.txt'), 'utf8', { + disabled: [tt.eof], + initialize: Object.assign(initialize([typeMetadata, eof]), { + resolveAll: resolveSlice + }) + }], + [read('__fixtures__/markdown/code-indented.md'), null, { + constructs: micromark, + finalizeContext + }], + [read('__fixtures__/markdown/code-text.md'), 'utf8', { + constructs: [codeFenced, codeText, eof] + }], + [read('__fixtures__/markdown/html-flow.md'), null, { + constructs: micromark, + finalizeContext + }] + ])('should work with constructs (%#)', (value, encoding, options) => { + // Arrange + options.resolvers = [resolveTokenList] - describe('user constructs', () => { - it('should tokenize empty file', () => { - expect(run('', { - constructs: [ - inlineTag, - numeric, - punctuator, - string, - ws - ] - }).head).toMatchSnapshot() - }) + // Act + const result = testSubject(value, encoding, options) - describe('non-empty file', () => { - it.each<[VFile, (Partial | null | undefined)?]>([ - [read('__fixtures__/inline-tag.txt'), { - constructs: { - [codes.cr]: ws, - [codes.leftBrace]: inlineTag, - [codes.lf]: ws, - [codes.space]: ws - }, - context: vi.fn(), - disabled: [tk.whitespace] - }], - [read('__fixtures__/hello.txt'), { - constructs: [string, punctuator], - context: vi.fn(identity), - initialize: Object.assign(initialize([string, punctuator]), { - resolveAll: vi.fn(identity) - }) - }], - [read('__fixtures__/strings.txt'), { - constructs: [string, punctuator] - }], - [read('__fixtures__/numerics.txt'), { constructs: [numeric, ws] }] - ])('sample %#', (file, options) => { - expect(run(file, options).head).toMatchSnapshot() - }) - }) + // Expect + expect(result).to.be.an('array').that.is.not.empty + expect(list(result[0]?.[1])).to.have.ordered.members(list(result)) + expect(result[0]![1]).toMatchSnapshot() }) }) diff --git a/src/constructs/eof.ts b/src/constructs/eof.ts deleted file mode 100644 index fbedcf6..0000000 --- a/src/constructs/eof.ts +++ /dev/null @@ -1,68 +0,0 @@ -/** - * @file Constructs - eof - * @module vfile-lexer/constructs/eof - */ - -import { tt } from '#src/enums' -import type { Construct, TokenizeContext } from '#src/interfaces' -import type { Effects, State } from '#src/types' -import { codes, type Code } from '@flex-development/vfile-reader' - -/** - * End of file construct. - * - * @const {Construct} eof - */ -const eof: Construct = { - /** - * Construct name. - */ - name: `vfile-lexer:${tt.eof}`, - - /** - * Check if the current character `code` can start this construct. - * - * @see {@linkcode Code} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Code} code - Current character code - * @return {boolean} `true` if `code` can start construct - */ - test(this: TokenizeContext, code: Code): boolean { - return code === codes.eof - }, - - /** - * Set up a state machine to handle character codes streaming in. 
- * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @param {State} ok - Successful tokenization state - * @return {State} Initial state - */ - tokenize(this: TokenizeContext, effects: Effects, ok: State): State { - return eof - - /** - * Tokenize end of file. - * - * @param {Code} code - Current character code - * @return {State} Next state - */ - function eof(code: Code): State { - effects.enter(tt.eof) - effects.consume(code) - effects.exit(tt.eof) - return ok - } - } -} - -export default eof diff --git a/src/constructs/index.ts b/src/constructs/index.ts index 75e2112..75ec3aa 100644 --- a/src/constructs/index.ts +++ b/src/constructs/index.ts @@ -3,5 +3,4 @@ * @module vfile-lexer/constructs */ -export { default as eof } from './eof' export { default as initialize } from './initialize' diff --git a/src/constructs/initialize.ts b/src/constructs/initialize.ts index dbf7036..bd96107 100644 --- a/src/constructs/initialize.ts +++ b/src/constructs/initialize.ts @@ -3,11 +3,12 @@ * @module vfile-lexer/constructs/initialize */ -import { tt } from '#src/enums' -import type { InitialConstruct, TokenizeContext } from '#src/interfaces' -import type { Constructs, Effects, State } from '#src/types' -import { codes, type Code } from '@flex-development/vfile-reader' -import eof from './eof' +import type { + Effects, + InitialConstruct, + TokenizeContext +} from '#src/interfaces' +import type { Code, Constructs, State } from '#src/types' /** * Create an initial construct. @@ -19,49 +20,41 @@ import eof from './eof' * @return {InitialConstruct} Initial construct */ function initialize(constructs: Constructs): InitialConstruct { - return { + return { name: 'vfile-lexer:initialize', tokenize } + + /** + * Set up a state machine to handle character codes streaming in. + * + * @see {@linkcode Effects} + * @see {@linkcode State} + * @see {@linkcode TokenizeContext} + * + * @this {TokenizeContext} + * + * @param {Effects} effects - Context object to transition state machine + * @return {State} Initial state + */ + function tokenize(this: TokenizeContext, effects: Effects): State { + return state + /** - * Construct name. + * Consume `code` and retry {@linkcode constructs}. + * + * @param {Code} code - Current character code + * @return {State | undefined} Next state */ - name: 'vfile-lexer:initialize', + function eat(code: Code): State | undefined { + return effects.consume(code), state + } /** - * Set up a state machine to handle character codes streaming in. + * Try a construct. * - * @see {@linkcode Effects} - * @see {@linkcode State} - * @see {@linkcode TokenizeContext} - * - * @this {TokenizeContext} - * - * @param {Effects} effects - Context object to transition state machine - * @return {State} Initial state + * @param {Code} code - Current character code + * @return {State | undefined} Next state */ - tokenize(this: TokenizeContext, effects: Effects): State { - void (effects.enter(tt.sof), effects.exit(tt.sof)) - return state - - /** - * Consume `code` and try tokenizing the next construct. - * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - function eat(code: Code): State | undefined { - return code === codes.eof - ? effects.attempt(eof)(code) - : (effects.consume(code), state) - } - - /** - * Try to tokenize a construct. 
- * - * @param {Code} code - Current character code - * @return {State | undefined} Next state - */ - function state(code: Code): State | undefined { - return effects.attempt(constructs, state, eat)(code) - } + function state(code: Code): State | undefined { + return effects.attempt(constructs, state, eat)(code) } } } diff --git a/src/enums/chars.ts b/src/enums/chars.ts new file mode 100644 index 0000000..efa42ac --- /dev/null +++ b/src/enums/chars.ts @@ -0,0 +1,155 @@ +/** + * @file chars + * @module vfile-lexer/chars + */ + +/** + * Character dictionary. + * + * @see https://symbl.cc/en/unicode/blocks/basic-latin + * @see https://symbl.cc/en/unicode/blocks/latin-1-supplement + * + * @enum {string} + */ +const chars = { + ack: '\u0006', + ampersand: '&', + apostrophe: '\'', + asterisk: '*', + at: '@', + backslash: '\\', + bar: '|', + bel: '\u0007', + bom: '\uFEFF', + bs: '\b', + can: '\u0018', + caret: '^', + colon: ':', + comma: ',', + cr: '\r', + crlf: '\r\n', + dc1: '\u0011', + dc2: '\u0012', + dc3: '\u0013', + dc4: '\u0014', + del: '\u007F', + digit0: '0', + digit1: '1', + digit2: '2', + digit3: '3', + digit4: '4', + digit5: '5', + digit6: '6', + digit7: '7', + digit8: '8', + digit9: '9', + dle: '\u0010', + dollar: '$', + dot: '.', + em: '\u0019', + empty: '', + enq: '\u0005', + eof: null, + eot: '\u0004', + equal: '=', + esc: '\u001B', + etb: '\u0017', + etx: '\u0003', + exclamation: '!', + ff: '\f', + fs: '\u001C', + graveAccent: '`', + gs: '\u001D', + gt: '>', + hash: '#', + ht: '\t', + leftBrace: '{', + leftBracket: '[', + leftParen: '(', + lf: '\n', + lowercaseA: 'a', + lowercaseB: 'b', + lowercaseC: 'c', + lowercaseD: 'd', + lowercaseE: 'e', + lowercaseF: 'f', + lowercaseG: 'g', + lowercaseH: 'h', + lowercaseI: 'i', + lowercaseJ: 'j', + lowercaseK: 'k', + lowercaseL: 'l', + lowercaseM: 'm', + lowercaseN: 'n', + lowercaseO: 'o', + lowercaseP: 'p', + lowercaseQ: 'q', + lowercaseR: 'r', + lowercaseS: 's', + lowercaseT: 't', + lowercaseU: 'u', + lowercaseV: 'v', + lowercaseW: 'w', + lowercaseX: 'x', + lowercaseY: 'y', + lowercaseZ: 'z', + ls: '\u2028', + lt: '<', + minus: '-', + nak: '\u0015', + nbsp: '\u00A0', + nul: '\0', + percent: '%', + plus: '+', + ps: '\u2029', + question: '?', + quotation: '"', + replacement: '�', + rightBrace: '}', + rightBracket: ']', + rightParen: ')', + rs: '\u001E', + semicolon: ';', + si: '\u000F', + slash: '/', + so: '\u000E', + soh: '\u0001', + space: ' ', + stx: '\u0002', + sub: '\u001A', + syn: '\u0016', + tilde: '~', + underscore: '_', + uppercaseA: 'A', + uppercaseB: 'B', + uppercaseC: 'C', + uppercaseD: 'D', + uppercaseE: 'E', + uppercaseF: 'F', + uppercaseG: 'G', + uppercaseH: 'H', + uppercaseI: 'I', + uppercaseJ: 'J', + uppercaseK: 'K', + uppercaseL: 'L', + uppercaseM: 'M', + uppercaseN: 'N', + uppercaseO: 'O', + uppercaseP: 'P', + uppercaseQ: 'Q', + uppercaseR: 'R', + uppercaseS: 'S', + uppercaseT: 'T', + uppercaseU: 'U', + uppercaseV: 'V', + uppercaseW: 'W', + uppercaseX: 'X', + uppercaseY: 'Y', + uppercaseZ: 'Z', + us: '\u001F', + vt: '\v', + zwj: '\u200D', + zwnj: '\u200C' +} as const + +export default chars diff --git a/src/enums/codes.ts b/src/enums/codes.ts new file mode 100644 index 0000000..7eca97c --- /dev/null +++ b/src/enums/codes.ts @@ -0,0 +1,170 @@ +/** + * @file Enums - codes + * @module vfile-lexer/enums/codes + */ + +import type { Code } from '#src/types' + +/** + * Character codes. + * + * This module contains constants for the ASCII block and the replacement + * character. 
+ *
+ * A few codes can be handled in a special way, such as line endings (CR, LF,
+ * and CR+LF, commonly known as end-of-line sequences: EOLs), as well as tab
+ * (horizontal tab) and its expansion based on what column it’s at (virtual
+ * space).
+ *
+ * @see {@linkcode Code}
+ * @see https://symbl.cc/en/unicode/blocks/basic-latin
+ * @see https://symbl.cc/en/unicode/blocks/latin-1-supplement
+ * @see https://util.unicode.org/UnicodeJsps/character.jsp
+ * @see https://util.unicode.org/UnicodeJsps/list-unicodeset.jsp
+ *
+ * @enum {Code}
+ */
+const codes = {
+  eof: null,
+  vcr: -5, // virtual carriage return (\r)
+  vlf: -4, // virtual line feed (\n)
+  crlf: -3, // carriage return + line feed (\r + \n)
+  vht: -2, // virtual horizontal tab (\t)
+  vs: -1, // virtual space
+  nul: 0,
+  soh: 1,
+  stx: 2,
+  etx: 3,
+  eot: 4,
+  enq: 5,
+  ack: 6,
+  bel: 7,
+  bs: 8,
+  ht: 9, // \t
+  lf: 10, // \n
+  vt: 11, // \v
+  ff: 12, // \f
+  cr: 13, // \r
+  so: 14,
+  si: 15,
+  dle: 16,
+  dc1: 17,
+  dc2: 18,
+  dc3: 19,
+  dc4: 20,
+  nak: 21,
+  syn: 22,
+  etb: 23,
+  can: 24,
+  em: 25,
+  sub: 26,
+  esc: 27,
+  fs: 28,
+  gs: 29,
+  rs: 30,
+  us: 31,
+  space: 32,
+  exclamation: 33, // !
+  quotation: 34, // "
+  hash: 35, // #
+  dollar: 36, // $
+  percent: 37, // %
+  ampersand: 38, // &
+  apostrophe: 39, // '
+  leftParen: 40, // (
+  rightParen: 41, // )
+  asterisk: 42, // *
+  plus: 43, // +
+  comma: 44, // ,
+  minus: 45, // -
+  dot: 46, // .
+  slash: 47, // /
+  digit0: 48, // 0
+  digit1: 49, // 1
+  digit2: 50, // 2
+  digit3: 51, // 3
+  digit4: 52, // 4
+  digit5: 53, // 5
+  digit6: 54, // 6
+  digit7: 55, // 7
+  digit8: 56, // 8
+  digit9: 57, // 9
+  colon: 58, // :
+  semicolon: 59, // ;
+  lt: 60, // <
+  equal: 61, // =
+  gt: 62, // >
+  question: 63, // ?
+  at: 64, // @
+  uppercaseA: 65, // A
+  uppercaseB: 66, // B
+  uppercaseC: 67, // C
+  uppercaseD: 68, // D
+  uppercaseE: 69, // E
+  uppercaseF: 70, // F
+  uppercaseG: 71, // G
+  uppercaseH: 72, // H
+  uppercaseI: 73, // I
+  uppercaseJ: 74, // J
+  uppercaseK: 75, // K
+  uppercaseL: 76, // L
+  uppercaseM: 77, // M
+  uppercaseN: 78, // N
+  uppercaseO: 79, // O
+  uppercaseP: 80, // P
+  uppercaseQ: 81, // Q
+  uppercaseR: 82, // R
+  uppercaseS: 83, // S
+  uppercaseT: 84, // T
+  uppercaseU: 85, // U
+  uppercaseV: 86, // V
+  uppercaseW: 87, // W
+  uppercaseX: 88, // X
+  uppercaseY: 89, // Y
+  uppercaseZ: 90, // Z
+  leftBracket: 91, // [
+  backslash: 92, // \
+  rightBracket: 93, // ]
+  caret: 94, // ^
+  underscore: 95, // _
+  graveAccent: 96, // `
+  lowercaseA: 97, // a
+  lowercaseB: 98, // b
+  lowercaseC: 99, // c
+  lowercaseD: 100, // d
+  lowercaseE: 101, // e
+  lowercaseF: 102, // f
+  lowercaseG: 103, // g
+  lowercaseH: 104, // h
+  lowercaseI: 105, // i
+  lowercaseJ: 106, // j
+  lowercaseK: 107, // k
+  lowercaseL: 108, // l
+  lowercaseM: 109, // m
+  lowercaseN: 110, // n
+  lowercaseO: 111, // o
+  lowercaseP: 112, // p
+  lowercaseQ: 113, // q
+  lowercaseR: 114, // r
+  lowercaseS: 115, // s
+  lowercaseT: 116, // t
+  lowercaseU: 117, // u
+  lowercaseV: 118, // v
+  lowercaseW: 119, // w
+  lowercaseX: 120, // x
+  lowercaseY: 121, // y
+  lowercaseZ: 122, // z
+  leftBrace: 123, // {
+  bar: 124, // |
+  rightBrace: 125, // }
+  tilde: 126, // ~
+  del: 127,
+  nbsp: 160, // \u00A0
+  zwnj: 8204, // \u200C
+  zwj: 8205, // \u200D
+  ls: 8232, // \u2028
+  ps: 8233, // \u2029
+  bom: 65_279, // byte order mark
+  replacement: 65_533 // �
+} as const
+
+export default codes
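(Aside: the `vcr`…`vs` codes above are negative on purpose, so they can never collide with a real code point. A minimal sketch of a custom code check built on this enum — the `eol` helper below is illustrative only, since the patch already ships its own `isLineEnding` utility:)

```ts
import { codes } from '@flex-development/vfile-lexer'
import type { Code } from '@flex-development/vfile-lexer'

/**
 * Check if `code` is a line ending: a raw carriage return or line feed, or
 * one of the virtual EOL codes (`vcr`, `vlf`, `crlf`) produced during
 * preprocessing, all of which are negative.
 *
 * @param {Code} code - Character code to check
 * @return {boolean} `true` if `code` is a line ending
 */
const eol = (code: Code): boolean =>
  code === codes.cr ||
  code === codes.lf ||
  (code !== null && code >= codes.vcr && code <= codes.crlf)

console.log(eol(codes.lf)) // true
console.log(eol(codes.space)) // false
```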
diff --git a/src/enums/index.ts b/src/enums/index.ts
index 3794706..f82f9d8 100644
--- a/src/enums/index.ts
+++ b/src/enums/index.ts
@@ -3,5 +3,6 @@
  * @module vfile-lexer/enums
  */
 
+export { default as chars } from './chars'
+export { default as codes } from './codes'
 export { default as ev } from './ev'
-export { default as tt } from './tt'
diff --git a/src/enums/tt.ts b/src/enums/tt.ts
deleted file mode 100644
index a28dfd6..0000000
--- a/src/enums/tt.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-/**
- * @file Enums - tt
- * @module vfile-lexer/enums/tt
- */
-
-/**
- * Token types.
- *
- * @enum {Lowercase<string>}
- */
-enum tt {
-  eof = 'eof',
-  sof = 'sof'
-}
-
-export default tt
diff --git a/src/index.ts b/src/index.ts
index 78a45bf..dda15a5 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -4,9 +4,10 @@
  */
 
 export * from './constructs'
-export { ev } from './enums'
+export { chars, codes, ev } from './enums'
 export type * from './interfaces'
 export { default as Lexer } from './lexer'
+export { default as preprocess } from './preprocess'
 export { default as tokenize } from './tokenize'
 export type * from './types'
 export * from './utils'
diff --git a/src/interfaces/__tests__/construct-record.spec-d.ts b/src/interfaces/__tests__/construct-record.spec-d.ts
new file mode 100644
index 0000000..7b90e58
--- /dev/null
+++ b/src/interfaces/__tests__/construct-record.spec-d.ts
@@ -0,0 +1,31 @@
+/**
+ * @file Type Tests - ConstructRecord
+ * @module vfile-lexer/interfaces/tests/unit-d/ConstructRecord
+ */
+
+import { codes } from '#src/enums'
+import type { ConstructPack } from '#src/types'
+import type { Nilable } from '@flex-development/tutils'
+import type TestSubject from '../construct-record'
+
+describe('unit-d:interfaces/ConstructRecord', () => {
+  type Value = Nilable<ConstructPack>
+
+  it('should match [[x: `${number}`]: ConstructPack | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty(`${codes.graveAccent}`)
+      .toEqualTypeOf<Value>()
+  })
+
+  it('should match [[x: "null"]: ConstructPack | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty(`${codes.eof}`)
+      .toEqualTypeOf<Value>()
+  })
+
+  it('should match [[x: number]: ConstructPack | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty(codes.at)
+      .toEqualTypeOf<Value>()
+  })
+})
diff --git a/src/interfaces/__tests__/construct.spec-d.ts b/src/interfaces/__tests__/construct.spec-d.ts
index 592a8d5..ed6a3c2 100644
--- a/src/interfaces/__tests__/construct.spec-d.ts
+++ b/src/interfaces/__tests__/construct.spec-d.ts
@@ -4,50 +4,50 @@
  */
 
 import type { Guard, Resolver, Tokenizer } from '#src/types'
-import type { Nilable } from '@flex-development/tutils'
+import type { Optional } from '@flex-development/tutils'
 import type TestSubject from '../construct'
 
 describe('unit-d:interfaces/Construct', () => {
-  it('should match [name?: string | null | undefined]', () => {
+  it('should match [name?: string | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('name')
-      .toEqualTypeOf<Nilable<string>>()
+      .toEqualTypeOf<Optional<string>>()
   })
 
-  it('should match [partial?: boolean | null | undefined]', () => {
+  it('should match [partial?: boolean | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('partial')
-      .toEqualTypeOf<Nilable<boolean>>()
+      .toEqualTypeOf<Optional<boolean>>()
   })
 
-  it('should match [previous?: Guard | null | undefined]', () => {
+  it('should match [previous?: Guard | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('previous')
-      .toEqualTypeOf<Nilable<Guard>>()
+      .toEqualTypeOf<Optional<Guard>>()
   })
 
-  it('should match [resolve?: Resolver | null | undefined]', () => {
+  it('should match [resolve?: Resolver | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('resolve')
-      .toEqualTypeOf<Nilable<Resolver>>()
+      .toEqualTypeOf<Optional<Resolver>>()
   })
 
-  it('should match [resolveAll?: Resolver | null | undefined]', () => {
+  it('should match [resolveAll?: Resolver | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('resolveAll')
-      .toEqualTypeOf<Nilable<Resolver>>()
+      .toEqualTypeOf<Optional<Resolver>>()
   })
 
-  it('should match [resolveTo?: Resolver | null | undefined]', () => {
+  it('should match [resolveTo?: Resolver | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('resolveTo')
-      .toEqualTypeOf<Nilable<Resolver>>()
+      .toEqualTypeOf<Optional<Resolver>>()
   })
 
-  it('should match [test?: Guard | null | undefined]', () => {
+  it('should match [test?: Guard | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('test')
-      .toEqualTypeOf<Nilable<Guard>>()
+      .toEqualTypeOf<Optional<Guard>>()
   })
 
   it('should match [tokenize: Tokenizer]', () => {
diff --git a/src/types/__tests__/effects.spec-d.ts b/src/interfaces/__tests__/effects.spec-d.ts
similarity index 79%
rename from src/types/__tests__/effects.spec-d.ts
rename to src/interfaces/__tests__/effects.spec-d.ts
index 2af7f4c..48dd4be 100644
--- a/src/types/__tests__/effects.spec-d.ts
+++ b/src/interfaces/__tests__/effects.spec-d.ts
@@ -1,15 +1,12 @@
 /**
  * @file Type Tests - Effects
- * @module vfile-lexer/types/tests/unit-d/Effects
+ * @module vfile-lexer/interfaces/tests/unit-d/Effects
  */
 
-import type Attempt from '../attempt'
-import type Consume from '../consume'
+import type { Attempt, Consume, Enter, Exit } from '#src/types'
 import type TestSubject from '../effects'
-import type Enter from '../enter'
-import type Exit from '../exit'
 
-describe('unit-d:types/Effects', () => {
+describe('unit-d:interfaces/Effects', () => {
   it('should match [attempt: Attempt]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('attempt')
diff --git a/src/interfaces/__tests__/options-preprocess.spec-d.ts b/src/interfaces/__tests__/options-preprocess.spec-d.ts
new file mode 100644
index 0000000..6b4e3b4
--- /dev/null
+++ b/src/interfaces/__tests__/options-preprocess.spec-d.ts
@@ -0,0 +1,15 @@
+/**
+ * @file Type Tests - PreprocessOptions
+ * @module vfile-lexer/interfaces/tests/unit-d/PreprocessOptions
+ */
+
+import type { Optional } from '@flex-development/tutils'
+import type TestSubject from '../options-preprocess'
+
+describe('unit-d:interfaces/PreprocessOptions', () => {
+  it('should match [tabSize?: number | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('tabSize')
+      .toEqualTypeOf<Optional<number>>()
+  })
+})
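(Aside: `tabSize` is the only preprocessor option, and it controls how a horizontal tab expands into virtual chunks. The sketch below is an assumption: the call signature is modeled on micromark's preprocessor, and the real one is defined in src/preprocess.ts in this patch:)

```ts
import { preprocess } from '@flex-development/vfile-lexer'

// assumption: `preprocess(options?)` returns a Preprocessor with a
// micromark-style signature — (value, encoding?, end?) => Chunk[]
const chunks = preprocess({ tabSize: 2 })('\thi', 'utf8', true)

// with tabSize: 2, the tab should expand to a virtual tab chunk (vht)
// plus virtual spaces up to the next tab stop, followed by the codes
// for `h` and `i`, and the eof code since `end` is true
console.log(chunks)
```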
diff --git a/src/interfaces/__tests__/options.spec-d.ts b/src/interfaces/__tests__/options.spec-d.ts
index e3262ab..8340b6a 100644
--- a/src/interfaces/__tests__/options.spec-d.ts
+++ b/src/interfaces/__tests__/options.spec-d.ts
@@ -3,9 +3,16 @@
  * @module vfile-lexer/interfaces/tests/unit-d/Options
  */
 
-import type { Constructs, FinalizeContext, TokenFactory } from '#src/types'
+import type {
+  CodeCheck,
+  Constructs,
+  FinalizeContext,
+  Preprocessor,
+  Resolver,
+  TokenFactory
+} from '#src/types'
 import type { Nilable } from '@flex-development/tutils'
-import type { Point } from '@flex-development/vfile-reader'
+import type { Point } from '@flex-development/vfile-location'
 import type InitialConstruct from '../construct-initial'
 import type TestSubject from '../options'
 
@@ -16,12 +23,6 @@ describe('unit-d:interfaces/Options', () => {
       .toEqualTypeOf<Nilable<Constructs>>()
   })
 
-  it('should match [context?: FinalizeContext | null | undefined]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('context')
-      .toEqualTypeOf<Nilable<FinalizeContext>>()
-  })
-
   it('should match [debug?: string | null | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('debug')
@@ -34,6 +35,18 @@ describe('unit-d:interfaces/Options', () => {
       .toEqualTypeOf<Nilable<readonly string[]>>()
   })
 
+  it('should match [eol?: CodeCheck | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('eol')
+      .toEqualTypeOf<Nilable<CodeCheck>>()
+  })
+
+  it('should match [finalizeContext?: FinalizeContext | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('finalizeContext')
+      .toEqualTypeOf<Nilable<FinalizeContext>>()
+  })
+
   it('should match [from?: Point | null | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('from')
@@ -46,9 +59,21 @@ describe('unit-d:interfaces/Options', () => {
       .toEqualTypeOf<Nilable<Point>>()
   })
 
-  it('should match [token: TokenFactory]', () => {
+  it('should match [preprocess?: Preprocessor | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('preprocess')
+      .toEqualTypeOf<Nilable<Preprocessor>>()
+  })
+
+  it('should match [resolvers?: readonly Resolver[] | null | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('resolvers')
+      .toEqualTypeOf<Nilable<readonly Resolver[]>>()
+  })
+
+  it('should match [token?: TokenFactory | null | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('token')
-      .toEqualTypeOf<TokenFactory>()
+      .toEqualTypeOf<Nilable<TokenFactory>>()
   })
 })
diff --git a/src/interfaces/__tests__/place.spec-d.ts b/src/interfaces/__tests__/place.spec-d.ts
new file mode 100644
index 0000000..4b6cc25
--- /dev/null
+++ b/src/interfaces/__tests__/place.spec-d.ts
@@ -0,0 +1,17 @@
+/**
+ * @file Type Tests - Place
+ * @module vfile-lexer/interfaces/tests/unit-d/Place
+ */
+
+import type { Point } from '@flex-development/vfile-location'
+import type TestSubject from '../place'
+
+describe('unit-d:interfaces/Place', () => {
+  it('should extend Point', () => {
+    expectTypeOf<TestSubject>().toMatchTypeOf<Point>()
+  })
+
+  it('should match [_index: number]', () => {
+    expectTypeOf<TestSubject>().toHaveProperty('_index').toEqualTypeOf<number>()
+  })
+})
diff --git a/src/interfaces/__tests__/position.spec-d.ts b/src/interfaces/__tests__/position.spec-d.ts
new file mode 100644
index 0000000..70dd52e
--- /dev/null
+++ b/src/interfaces/__tests__/position.spec-d.ts
@@ -0,0 +1,17 @@
+/**
+ * @file Type Tests - Position
+ * @module vfile-lexer/interfaces/tests/unit-d/Position
+ */
+
+import type Place from '../place'
+import type TestSubject from '../position'
+
+describe('unit-d:interfaces/Position', () => {
+  it('should match [end: Place]', () => {
+    expectTypeOf<TestSubject>().toHaveProperty('end').toEqualTypeOf<Place>()
+  })
+
+  it('should match [start: Place]', () => {
+    expectTypeOf<TestSubject>().toHaveProperty('start').toEqualTypeOf<Place>()
+  })
+})
diff --git a/src/interfaces/__tests__/token-fields.spec-d.ts b/src/interfaces/__tests__/token-fields.spec-d.ts
new file mode 100644
index 0000000..0411203
--- /dev/null
+++ b/src/interfaces/__tests__/token-fields.spec-d.ts
@@ -0,0 +1,12 @@
+/**
+ * @file Type Tests - TokenFields
+ * @module vfile-lexer/interfaces/tests/unit-d/TokenFields
+ */
+
+import type TestSubject from '../token-fields'
+
+describe('unit-d:interfaces/TokenFields', () => {
+  it('should register token fields', () => {
+    expectTypeOf<TestSubject>().not.toBeNever()
+  })
+})
diff --git a/src/interfaces/__tests__/token-info.spec-d.ts b/src/interfaces/__tests__/token-info.spec-d.ts
new file mode 100644
index 0000000..b861759
--- /dev/null
+++ b/src/interfaces/__tests__/token-info.spec-d.ts
@@ -0,0 +1,31 @@
+/**
+ * @file Type Tests - TokenInfo
+ * @module vfile-lexer/interfaces/tests/unit-d/TokenInfo
+ */
+
+import type Position from '../position'
+import type Token from '../token'
+import type TokenFields from '../token-fields'
+import type TestSubject from '../token-info'
+
+describe('unit-d:interfaces/TokenInfo', () => {
+  it('should extend Position', () => {
+    expectTypeOf<TestSubject>().toMatchTypeOf<Position>()
+  })
+
+  it('should extend TokenFields', () => {
+    expectTypeOf<TestSubject>().toMatchTypeOf<TokenFields>()
+  })
+
+  it('should match [next?: Token | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('next')
+      .toEqualTypeOf<Token | undefined>()
+  })
+
+  it('should match [previous?: Token | undefined]', () => {
+    expectTypeOf<TestSubject>()
+      .toHaveProperty('previous')
+      .toEqualTypeOf<Token | undefined>()
+  })
+})
diff --git a/src/interfaces/__tests__/token-type-map.spec-d.ts b/src/interfaces/__tests__/token-type-map.spec-d.ts
index b56f639..96ad944 100644
--- a/src/interfaces/__tests__/token-type-map.spec-d.ts
+++ b/src/interfaces/__tests__/token-type-map.spec-d.ts
@@ -3,12 +3,10 @@
  * @module vfile-lexer/interfaces/tests/unit-d/TokenTypeMap
  */
 
-import type { tt } from '#src/enums'
 import type TestSubject from '../token-type-map'
 
 describe('unit-d:interfaces/TokenTypeMap', () => {
   it('should register token types', () => {
-    expectTypeOf<TestSubject>().exclude<tt>().not.toBeNever()
-    expectTypeOf<TestSubject>().extract<tt>().not.toBeNever()
+    expectTypeOf<TestSubject>().not.toBeNever()
   })
 })
diff --git a/src/interfaces/__tests__/token.spec-d.ts b/src/interfaces/__tests__/token.spec-d.ts
index bae7f62..fac56da 100644
--- a/src/interfaces/__tests__/token.spec-d.ts
+++ b/src/interfaces/__tests__/token.spec-d.ts
@@ -4,24 +4,12 @@
  */
 
 import type { TokenType } from '#src/types'
-import type { Position } from '@flex-development/vfile-reader'
 import type TestSubject from '../token'
+import type TokenInfo from '../token-info'
 
 describe('unit-d:interfaces/Token', () => {
-  it('should extend Position', () => {
-    expectTypeOf<TestSubject>().toMatchTypeOf<Position>()
-  })
-
-  it('should match [next?: Token | undefined]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('next')
-      .toEqualTypeOf<TestSubject | undefined>()
-  })
-
-  it('should match [previous?: Token | undefined]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('previous')
-      .toEqualTypeOf<TestSubject | undefined>()
+  it('should extend TokenInfo', () => {
+    expectTypeOf<TestSubject>().toMatchTypeOf<TokenInfo>()
   })
 
   it('should match [type: T]', () => {
diff --git a/src/interfaces/__tests__/tokenize-context.spec-d.ts b/src/interfaces/__tests__/tokenize-context.spec-d.ts
index c25bca2..408c029 100644
--- a/src/interfaces/__tests__/tokenize-context.spec-d.ts
+++ b/src/interfaces/__tests__/tokenize-context.spec-d.ts
@@ -3,38 +3,34 @@
  * @module vfile-lexer/interfaces/tests/unit-d/TokenizeContext
  */
 
-import type { Event } from '#src/types'
-import type { Nilable } from '@flex-development/tutils'
 import type {
   Code,
-  CodeCheckFactory,
-  CodeReader
-} from '@flex-development/vfile-reader'
+  DefineSkip,
+  Event,
+  Now,
+  SliceSerialize,
+  SliceStream,
+  Write
+} from '#src/types'
+import type { Optional } from '@flex-development/tutils'
 import type Construct from '../construct'
-import type Token from '../token'
 import type TestSubject from '../tokenize-context'
 
 describe('unit-d:interfaces/TokenizeContext', () => {
-  it('should match [check: CodeCheckFactory]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('check')
-      .toEqualTypeOf<CodeCheckFactory>()
-  })
-
   it('should match [code: Code]', () => {
     expectTypeOf<TestSubject>().toHaveProperty('code').toEqualTypeOf<Code>()
   })
 
-  it('should match [currentConstruct?: Construct | null | undefined]', () => {
+  it('should match [currentConstruct?: Construct | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('currentConstruct')
-      .toEqualTypeOf<Nilable<Construct>>()
+      .toEqualTypeOf<Optional<Construct>>()
   })
 
-  it('should match [disabled: readonly string[]]', () => {
+  it('should match [defineSkip: DefineSkip]', () => {
     expectTypeOf<TestSubject>()
-      .toHaveProperty('disabled')
-      .toEqualTypeOf<readonly string[]>()
+      .toHaveProperty('defineSkip')
+      .toEqualTypeOf<DefineSkip>()
   })
 
   it('should match [events: Event[]]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('events')
@@ -43,59 +39,37 @@ describe('unit-d:interfaces/TokenizeContext', () => {
       .toEqualTypeOf<Event[]>()
   })
 
-  it('should match [includes: CodeReader["includes"]]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('includes')
-      .toEqualTypeOf<CodeReader['includes']>()
-  })
-
-  it('should match [interrupt?: boolean | null | undefined]', () => {
+  it('should match [interrupt?: boolean | undefined]', () => {
     expectTypeOf<TestSubject>()
       .toHaveProperty('interrupt')
-      .toEqualTypeOf<Nilable<boolean>>()
+      .toEqualTypeOf<Optional<boolean>>()
   })
 
   it('should match [next: Code]', () => {
     expectTypeOf<TestSubject>().toHaveProperty('next').toEqualTypeOf<Code>()
   })
 
-  it('should match [now: CodeReader["now"]]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('now')
-      .toEqualTypeOf<CodeReader['now']>()
-  })
-
-  it('should match [peek: CodeReader["peek"]]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('peek')
-      .toEqualTypeOf<CodeReader['peek']>()
+  it('should match [now: Now]', () => {
+    expectTypeOf<TestSubject>().toHaveProperty('now').toEqualTypeOf<Now>()
   })
 
   it('should match [previous: Code]', () => {
     expectTypeOf<TestSubject>().toHaveProperty('previous').toEqualTypeOf<Code>()
   })
 
-  it('should match [serialize: CodeReader["serialize"]]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('serialize')
-      .toEqualTypeOf<CodeReader['serialize']>()
-  })
-
-  it('should match [slice: CodeReader["slice"]]', () => {
+  it('should match [sliceSerialize: SliceSerialize]', () => {
     expectTypeOf<TestSubject>()
-      .toHaveProperty('slice')
-      .toEqualTypeOf<CodeReader['slice']>()
+      .toHaveProperty('sliceSerialize')
+      .toEqualTypeOf<SliceSerialize>()
   })
 
-  it('should match [sliceSerialize: CodeReader["sliceSerialize"]]', () => {
+  it('should match [sliceStream: SliceStream]', () => {
     expectTypeOf<TestSubject>()
-      .toHaveProperty('sliceSerialize')
-      .toEqualTypeOf<CodeReader['sliceSerialize']>()
+      .toHaveProperty('sliceStream')
+      .toEqualTypeOf<SliceStream>()
   })
 
-  it('should match [token: Readonly<Token>]', () => {
-    expectTypeOf<TestSubject>()
-      .toHaveProperty('token')
-      .toEqualTypeOf<Readonly<Token>>()
+  it('should match [write: Write]', () => {
+    expectTypeOf<TestSubject>().toHaveProperty('write').toEqualTypeOf<Write>()
   })
 })
diff --git a/src/interfaces/construct-record.ts b/src/interfaces/construct-record.ts
new file mode 100644
index 0000000..0a95445
--- /dev/null
+++ b/src/interfaces/construct-record.ts
@@ -0,0 +1,27 @@
+/**
+ * @file Interfaces - ConstructRecord
+ * @module vfile-lexer/interfaces/ConstructRecord
+ */
+
+import type { ConstructPack } from '#src/types'
+
+/**
+ * Several constructs, mapped from their initial codes.
+ */
+interface ConstructRecord {
+  /**
+   * Try tokenizing constructs that start with the specified character code.
+   *
+   * @see {@linkcode ConstructPack}
+   */
+  [code: `${number}` | number]: ConstructPack | null | undefined
+
+  /**
+   * Try tokenizing constructs that start with any character code.
+   *
+   * @see {@linkcode ConstructPack}
+   */
+  null?: ConstructPack | null | undefined
+}
+
+export type { ConstructRecord as default }
diff --git a/src/interfaces/construct.ts b/src/interfaces/construct.ts
index 6d5fdcd..e147385 100644
--- a/src/interfaces/construct.ts
+++ b/src/interfaces/construct.ts
@@ -12,26 +12,26 @@ interface Construct {
   /**
    * Name of the construct, used to toggle constructs off.
    */
-  name?: string | null | undefined
+  name?: string | undefined
 
   /**
    * Whether this construct represents a partial construct.
    */
-  partial?: boolean | null | undefined
+  partial?: boolean | undefined
 
   /**
    * Check if the previous character code can come before this construct.
    *
    * @see {@linkcode Guard}
    */
-  previous?: Guard | null | undefined
+  previous?: Guard | undefined
 
   /**
    * Resolve the events parsed by {@linkcode tokenize}.
    *
    * @see {@linkcode Resolver}
    */
-  resolve?: Resolver | null | undefined
+  resolve?: Resolver | undefined
 
   /**
   * Resolve all events when the content is complete, from the start to the end.
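(Aside: in practice the new `ConstructRecord` shape reads like the sketch below. The constructs named here are hypothetical placeholders — any object with a `tokenize` method qualifies:)

```ts
import { codes } from '@flex-development/vfile-lexer'
import type {
  Construct,
  ConstructRecord
} from '@flex-development/vfile-lexer'

// hypothetical constructs; any value matching Construct works
declare const codeFenced: Construct
declare const lineEnding: Construct

/**
 * Constructs mapped from the character codes that can start them:
 * `codeFenced` is only tried when a backtick is seen, while constructs
 * under the `null` key are tried at every position.
 */
const constructs: ConstructRecord = {
  [codes.graveAccent]: codeFenced,
  null: [lineEnding]
}
```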
@@ -39,7 +39,7 @@ interface Construct { * * @see {@linkcode Resolver} */ - resolveAll?: Resolver | null | undefined + resolveAll?: Resolver | undefined /** * Resolve the events from the start of the content (which may include other @@ -47,14 +47,14 @@ interface Construct { * * @see {@linkcode Resolver} */ - resolveTo?: Resolver | null | undefined + resolveTo?: Resolver | undefined /** * Check if the current character code can start this construct. * * @see {@linkcode Guard} */ - test?: Guard | null | undefined + test?: Guard | undefined /** * Set up a state machine to handle character codes streaming in. diff --git a/src/types/effects.ts b/src/interfaces/effects.ts similarity index 77% rename from src/types/effects.ts rename to src/interfaces/effects.ts index d78baa7..cc6854f 100644 --- a/src/types/effects.ts +++ b/src/interfaces/effects.ts @@ -1,17 +1,14 @@ /** - * @file Type Aliases - Effects - * @module vfile-lexer/types/Effects + * @file Interfaces - Effects + * @module vfile-lexer/interfaces/Effects */ -import type Attempt from './attempt' -import type Consume from './consume' -import type Enter from './enter' -import type Exit from './exit' +import type { Attempt, Consume, Enter, Exit } from '#src/types' /** * Context object to transition the state machine. */ -type Effects = { +interface Effects { /** * Try to tokenize a construct. * diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts index e30fcf2..e061627 100644 --- a/src/interfaces/index.ts +++ b/src/interfaces/index.ts @@ -3,10 +3,17 @@ * @module vfile-lexer/interfaces */ -export type { Point, Position } from '@flex-development/vfile-reader' +export type { Point } from '@flex-development/vfile-location' export type { default as Construct } from './construct' export type { default as InitialConstruct } from './construct-initial' +export type { default as ConstructRecord } from './construct-record' +export type { default as Effects } from './effects' export type { default as Options } from './options' +export type { default as PreprocessOptions } from './options-preprocess' +export type { default as Place } from './place' +export type { default as Position } from './position' export type { default as Token } from './token' +export type { default as TokenFields } from './token-fields' +export type { default as TokenInfo } from './token-info' export type { default as TokenTypeMap } from './token-type-map' export type { default as TokenizeContext } from './tokenize-context' diff --git a/src/interfaces/options-preprocess.ts b/src/interfaces/options-preprocess.ts new file mode 100644 index 0000000..7627810 --- /dev/null +++ b/src/interfaces/options-preprocess.ts @@ -0,0 +1,18 @@ +/** + * @file Interfaces - PreprocessOptions + * @module vfile-lexer/interfaces/PreprocessOptions + */ + +/** + * Preprocessor configuration options. + */ +interface PreprocessOptions { + /** + * Number of spaces a tab is equivalent to. 
+   *
+   * @default 2
+   */
+  tabSize?: number | undefined
+}
+
+export type { PreprocessOptions as default }
diff --git a/src/interfaces/options.ts b/src/interfaces/options.ts
index 2fa3003..2cb555d 100644
--- a/src/interfaces/options.ts
+++ b/src/interfaces/options.ts
@@ -3,8 +3,16 @@
  * @module vfile-lexer/interfaces/Options
  */
 
-import type { Constructs, FinalizeContext, TokenFactory } from '#src/types'
-import type { Point } from '@flex-development/vfile-reader'
+import type {
+  CodeCheck,
+  Constructs,
+  FinalizeContext,
+  Preprocessor,
+  Resolver,
+  TokenFactory
+} from '#src/types'
+import type { u } from '@flex-development/unist-util-builder'
+import type { Point } from '@flex-development/vfile-location'
 import type InitialConstruct from './construct-initial'
 
 /**
@@ -18,13 +26,6 @@ interface Options {
    */
   constructs?: Constructs | null | undefined
 
-  /**
-   * Finalize the tokenization context.
-   *
-   * @see {@linkcode FinalizeContext}
-   */
-  context?: FinalizeContext | null | undefined
-
   /**
    * Debug logger name.
    *
@@ -37,6 +38,20 @@ interface Options {
    */
   disabled?: readonly string[] | null | undefined
 
+  /**
+   * Line ending code check.
+   *
+   * @see {@linkcode CodeCheck}
+   */
+  eol?: CodeCheck | null | undefined
+
+  /**
+   * Finalize the tokenization context.
+   *
+   * @see {@linkcode FinalizeContext}
+   */
+  finalizeContext?: FinalizeContext | null | undefined
+
   /**
    * Point before first character in file.
    *
@@ -47,18 +62,35 @@ interface Options {
   from?: Point | null | undefined
 
   /**
-   * Initialization construct.
+   * Initial construct.
    *
    * @see {@linkcode InitialConstruct}
    */
   initialize?: InitialConstruct | null | undefined
 
+  /**
+   * Turn a value into character code chunks.
+   *
+   * @see {@linkcode Preprocessor}
+   */
+  preprocess?: Preprocessor | null | undefined
+
+  /**
+   * End of stream resolvers.
+   *
+   * @see {@linkcode Resolver}
+   */
+  resolvers?: readonly Resolver[] | null | undefined
+
   /**
    * Create a new token.
    *
    * @see {@linkcode TokenFactory}
+   * @see {@linkcode u}
+   *
+   * @default u
    */
-  token: TokenFactory
+  token?: TokenFactory | null | undefined
 }
 
 export type { Options as default }
diff --git a/src/interfaces/place.ts b/src/interfaces/place.ts
new file mode 100644
index 0000000..65271e3
--- /dev/null
+++ b/src/interfaces/place.ts
@@ -0,0 +1,22 @@
+/**
+ * @file Interfaces - Place
+ * @module vfile-lexer/interfaces/Place
+ */
+
+import type { Point } from '@flex-development/vfile-location'
+
+/**
+ * One place in a file, with additional chunk metadata.
+ *
+ * @see {@linkcode Point}
+ *
+ * @extends {Point}
+ */
+interface Place extends Point {
+  /**
+   * Index of character code chunk.
+   */
+  _index: number
+}
+
+export type { Place as default }
diff --git a/src/interfaces/position.ts b/src/interfaces/position.ts
new file mode 100644
index 0000000..b95aba2
--- /dev/null
+++ b/src/interfaces/position.ts
@@ -0,0 +1,27 @@
+/**
+ * @file Interfaces - Position
+ * @module vfile-lexer/interfaces/Position
+ */
+
+import type Place from './place'
+
+/**
+ * Range between two points in a source file.
+ */
+interface Position {
+  /**
+   * Place of last character code in range.
+   *
+   * @see {@linkcode Place}
+   */
+  end: Place
+
+  /**
+   * Place of first character code in range.
+   *
+   * @see {@linkcode Place}
+   */
+  start: Place
+}
+
+export type { Position as default }
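(Aside: a quick sketch of the two interfaces just added. `Place` is a regular unist-style point plus the `_index` of the chunk it falls in, and `Position` is a pair of places; the values below are hypothetical:)

```ts
import type { Place, Position } from '@flex-development/vfile-lexer'

// a span covering one character at the very start of a file: line and
// column are 1-based, offset is 0-based, and _index points at the chunk
// the place falls in
const start: Place = { _index: 0, column: 1, line: 1, offset: 0 }
const end: Place = { _index: 1, column: 2, line: 1, offset: 1 }

const position: Position = { end, start }
console.log(position)
```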
+ * + * @see {@linkcode Place} + */ + start: Place +} + +export type { Position as default } diff --git a/src/interfaces/token-fields.ts b/src/interfaces/token-fields.ts new file mode 100644 index 0000000..046207a --- /dev/null +++ b/src/interfaces/token-fields.ts @@ -0,0 +1,20 @@ +/** + * @file Interfaces - TokenFields + * @module vfile-lexer/interfaces/TokenFields + */ + +/** + * Token fields registry. + * + * This interface can be augmented to register custom token fields. + * + * @example + * declare module '@flex-development/vfile-lexer' { + * interface TokenFields { + * value?: string | null + * } + * } + */ +interface TokenFields {} + +export type { TokenFields as default } diff --git a/src/interfaces/token-info.ts b/src/interfaces/token-info.ts new file mode 100644 index 0000000..fb4287b --- /dev/null +++ b/src/interfaces/token-info.ts @@ -0,0 +1,35 @@ +/** + * @file Interfaces - TokenInfo + * @module vfile-lexer/interfaces/TokenInfo + */ + +import type Position from './position' +import type Token from './token' +import type TokenFields from './token-fields' + +/** + * Token data. + * + * @see {@linkcode Position} + * @see {@linkcode TokenFields} + * + * @extends {Position} + * @extends {TokenFields} + */ +interface TokenInfo extends Position, TokenFields { + /** + * Next token. + * + * @see {@linkcode Token} + */ + next?: Token | undefined + + /** + * Previous token. + * + * @see {@linkcode Token} + */ + previous?: Token | undefined +} + +export type { TokenInfo as default } diff --git a/src/interfaces/token-type-map.ts b/src/interfaces/token-type-map.ts index cc74152..8063df1 100644 --- a/src/interfaces/token-type-map.ts +++ b/src/interfaces/token-type-map.ts @@ -3,8 +3,6 @@ * @module vfile-lexer/interfaces/TokenTypeMap */ -import type { tt } from '#src/enums' - /** * Token type registry. * @@ -13,13 +11,10 @@ import type { tt } from '#src/enums' * @example * declare module '@flex-development/vfile-lexer' { * interface TokenTypeMap { - * type: TokenType + * whitespace: tt.whitespace * } * } */ -interface TokenTypeMap { - eof: tt.eof - sof: tt.sof -} +interface TokenTypeMap {} export type { TokenTypeMap as default } diff --git a/src/interfaces/token.ts b/src/interfaces/token.ts index 2d52fcd..1873daa 100644 --- a/src/interfaces/token.ts +++ b/src/interfaces/token.ts @@ -3,8 +3,8 @@ * @module vfile-lexer/interfaces/Token */ -import type { TokenType } from '#src/types' -import type { Code, Position } from '@flex-development/vfile-reader' +import type { Code, TokenType } from '#src/types' +import type TokenInfo from './token-info' /** * A span of one (`1`) or more character codes. @@ -26,24 +26,14 @@ import type { Code, Position } from '@flex-development/vfile-reader' * } * * @see {@linkcode Code} - * @see {@linkcode Position} + * @see {@linkcode TokenInfo} * @see {@linkcode TokenType} * * @template {TokenType} [T=TokenType] - Token type * - * @extends {Position} + * @extends {TokenInfo} */ -interface Token extends Position { - /** - * Next token in linked token list. - */ - next?: Token | undefined - - /** - * Previous token in linked token list. - */ - previous?: Token | undefined - +interface Token extends TokenInfo { /** * Token type. 
*/ diff --git a/src/interfaces/tokenize-context.ts b/src/interfaces/tokenize-context.ts index 1ccd45c..2fdf93b 100644 --- a/src/interfaces/tokenize-context.ts +++ b/src/interfaces/tokenize-context.ts @@ -3,26 +3,21 @@ * @module vfile-lexer/interfaces/TokenizeContext */ -import type { Event } from '#src/types' import type { Code, - CodeCheckFactory, - CodeReader -} from '@flex-development/vfile-reader' + DefineSkip, + Event, + Now, + SliceSerialize, + SliceStream, + Write +} from '#src/types' import type Construct from './construct' -import type Token from './token' /** * Context object to assist with tokenization. */ interface TokenizeContext { - /** - * Create a code check from a regular expression. - * - * @see {@linkcode CodeCheckFactory} - */ - check: CodeCheckFactory - /** * Get the current character code. * @@ -39,32 +34,26 @@ interface TokenizeContext { * * @see {@linkcode Construct} */ - currentConstruct?: Construct | null | undefined + currentConstruct?: Construct | undefined /** - * Disabled construct names. + * Define a skip. + * + * @see {@linkcode DefineSkip} */ - disabled: readonly string[] + defineSkip: DefineSkip /** - * Current list of events. + * List of events. * * @see {@linkcode Event} */ events: Event[] /** - * Check if the file contains the given search value, relative to the current - * reader position. - * - * @see {@linkcode CodeReader.includes} + * Boolean indicating a construct is interrupting another construct. */ - includes: CodeReader['includes'] - - /** - * Boolean indicating the a construct is interrupting another construct. - */ - interrupt?: boolean | null | undefined + interrupt?: boolean | undefined /** * Get the next character code. @@ -78,20 +67,13 @@ interface TokenizeContext { /** * Get the current point in the file. * - * @see {@linkcode CodeReader.now} - */ - now: CodeReader['now'] - - /** - * Get the next `k`-th code point from the file without changing the position - * of the reader, with `null` denoting end of file. - * - * @see {@linkcode CodeReader.peek} + * @see {@linkcode Now} */ - peek: CodeReader['peek'] + now: Now /** - * Get the previous character code. + * Get the previous character code without changing the position of the + * reader. * * @see {@linkcode Code} * @@ -100,34 +82,28 @@ interface TokenizeContext { get previous(): Code /** - * Convert the specified sequence of character codes to a string. + * Get the text spanning the specified range without changing the position of + * the reader. * - * @see {@linkcode CodeReader.serialize} + * @see {@linkcode SliceSerialize} */ - serialize: CodeReader['serialize'] + sliceSerialize: SliceSerialize /** - * Get the character codes spanning the specified range without changing the - * position of the reader. + * Get the chunks spanning the specified range. * - * @see {@linkcode CodeReader.slice} + * @see {@linkcode SliceStream} */ - slice: CodeReader['slice'] + sliceStream: SliceStream /** - * Get the text spanning the specified range without changing the position of - * the reader. + * Write a slice of chunks. * - * @see {@linkcode CodeReader.sliceSerialize} - */ - sliceSerialize: CodeReader['sliceSerialize'] - - /** - * Current tail token. + * The eof code (`null`) can be used to signal the end of the stream. 
* - * @see {@linkcode Token} + * @see {@linkcode Write} */ - get token(): Readonly + write: Write } export type { TokenizeContext as default } diff --git a/src/lexer.ts b/src/lexer.ts index e2b4d00..e3acafa 100644 --- a/src/lexer.ts +++ b/src/lexer.ts @@ -3,65 +3,90 @@ * @module vfile-lexer/lexer */ -import { - CodeReader as Reader, - codes, - type Code, - type Offset, - type Point -} from '@flex-development/vfile-reader' +import { u } from '@flex-development/unist-util-builder' +import { Location } from '@flex-development/vfile-location' import debug from 'debug' import { ok as assert } from 'devlop' -import { splice } from 'micromark-util-chunked' -import type { VFile, Value } from 'vfile' +import { push, splice } from 'micromark-util-chunked' import { initialize } from './constructs' -import { ev } from './enums' +import { chars, codes, ev } from './enums' import type { Construct, + ConstructRecord, + Effects, InitialConstruct, Options, + Place, + Point, + Position, Token, + TokenFields, + TokenInfo, TokenizeContext } from './interfaces' +import preprocess from './preprocess' import type { Attempt, - ConstructRecord, + Chunk, + Code, + CodeCheck, + Column, + ConstructPack, Constructs, - Effects, + DefineSkip, Event, + Line, + Offset, + Preprocessor, + Resolver, ReturnHandle, State, TokenFactory, - TokenFields, TokenType } from './types' -import { resolveAll } from './utils' +import { isLineEnding, resolveAll } from './utils' /** * Source file tokenizer. * + * @see {@linkcode Location} + * @see {@linkcode TokenizeContext} + * * @class + * @extends {Location} + * @implements {TokenizeContext} */ -class Lexer { +class Lexer extends Location implements TokenizeContext { /** - * Expected character code, used for tracking bugs. + * Character code chunks. * * @see {@linkcode Code} * * @protected * @instance + * @member {Code[]} chunks + */ + protected chunks: Code[] + + /** + * Expected character code, used for tracking bugs. + * + * @see {@linkcode Code} + * + * @private + * @instance * @member {Code} code */ - protected code: Code + #code: Code /** * Character code consumption state, used for tracking bugs. * - * @protected + * @private * @instance * @member {boolean | null} consumed */ - protected consumed: boolean | null + #consumed: boolean | null /** * Tokenize context. @@ -86,13 +111,13 @@ class Lexer { protected debug: debug.Debugger /** - * Disabled construct names. + * List of disabled constructs. * - * @public + * @protected * @instance * @member {ReadonlyArray} disabled */ - public disabled: readonly string[] + protected disabled: readonly string[] /** * Context object to transition the state machine. @@ -106,13 +131,15 @@ class Lexer { protected effects: Effects /** - * Boolean indicating end of file has been reached. + * Line ending code check. + * + * @see {@linkcode CodeCheck} * * @protected * @instance - * @member {boolean} eof + * @member {CodeCheck} eol */ - protected eof: boolean + protected eol: CodeCheck /** * List of events. @@ -126,18 +153,7 @@ class Lexer { public events: Event[] /** - * Head token. - * - * @see {@linkcode Token} - * - * @public - * @instance - * @member {Token} head - */ - public head!: Token - - /** - * Initialization construct. + * Initial construct. 
   *
   * @see {@linkcode InitialConstruct}
   *
   * @public
   * @instance
   * @member {InitialConstruct} initialize
   */
  public initialize: InitialConstruct

  /**
   * Last construct.
   *
   * @see {@linkcode Construct}
   *
   * @protected
   * @instance
-   * @member {Construct | null | undefined} lastConstruct
+   * @member {Construct | undefined} lastConstruct
   */
-  protected lastConstruct: Construct | null | undefined
+  protected lastConstruct: Construct | undefined

  /**
   * Last {@linkcode events} length.
   *
-   * @see {@linkcode Offset}
-   *
   * @protected
   * @instance
-   * @member {Offset} lastEvent
+   * @member {number} lastEvent
   */
-  protected lastEvent: Offset
+  protected lastEvent: number

  /**
-   * Last reader index.
+   * Last place.
   *
-   * @see {@linkcode Offset}
+   * @see {@linkcode Place}
   *
   * @protected
   * @instance
-   * @member {Offset} lastIndex
+   * @member {Place} lastPlace
   */
-  protected lastIndex: Offset
+  protected lastPlace: Place

  /**
-   * Last tail token.
+   * Last token stack.
+   *
+   * @see {@linkcode Token}
   *
   * @protected
   * @instance
-   * @member {Token | null} lastToken
+   * @member {Token[]} lastStack
   */
-  protected lastToken: Token | null
+  protected lastStack: Token[]

  /**
-   * Source file reader.
+   * Current point in file.
   *
-   * @see {@linkcode Reader}
+   * @see {@linkcode Place}
+   *
+   * @public
+   * @instance
+   * @member {Place} place
+   */
+  declare public place: Place
+
+  /**
+   * Turn a value into character code chunks.
+   *
+   * @see {@linkcode Preprocessor}
   *
   * @protected
   * @instance
-   * @member {Reader} reader
+   * @member {Preprocessor} preprocess
   */
-  protected reader: Reader
+  protected preprocess: Preprocessor

  /**
   * Constructs with `resolveAll` handlers.
   *
   * @see {@linkcode Construct}
   *
   * @protected
   * @instance
   * @member {Construct[]} resolveAll
   */
  protected resolveAll: Construct[]

  /**
-   * Current state.
+   * End of stream resolvers.
   *
-   * @see {@linkcode State}
+   * @see {@linkcode Resolver}
   *
   * @protected
   * @instance
-   * @member {State | undefined} state
+   * @member {Resolver[]} resolvers
   */
-  protected state: State | undefined
+  protected resolvers: Resolver[]
+
+  /**
+   * Map, where each key is a line number and each value a column to be skipped
+   * to when the internal reader is on that line.
+   *
+   * @see {@linkcode Column}
+   * @see {@linkcode Line}
+   *
+   * @protected
+   * @instance
+   * @member {Record<Line, Column>} skips
+   */
+  protected skips: Record<Line, Column>

  /**
-   * Tail token.
+   * Token stack.
   *
   * @see {@linkcode Token}
   *
-   * @public
+   * @protected
   * @instance
-   * @member {Token} tail
+   * @member {Token[]} stack
   */
-  public tail!: Token
+  protected stack: Token[]
+
+  /**
+   * Current state.
+   *
+   * @see {@linkcode State}
+   *
+   * @protected
+   * @instance
+   * @member {State | undefined} state
+   */
+  protected state: State | undefined

  /**
   * Token factory.
   *
   * @see {@linkcode TokenFactory}
   *
   * @protected
   * @instance
   * @member {TokenFactory} token
   */
  protected token: TokenFactory

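(Aside: `token` is the factory the lexer calls whenever it opens a token. A sketch of a custom factory, mirroring the default set up in the constructor below — the default wraps `u` from unist-util-builder and hides the linked-list pointers from enumeration; the `tokenize` call at the end is a hypothetical usage:)

```ts
import { u } from '@flex-development/unist-util-builder'
import { tokenize } from '@flex-development/vfile-lexer'
import type { Token, TokenInfo } from '@flex-development/vfile-lexer'
import type { TokenType } from '@flex-development/vfile-lexer'

/**
 * Create a token, keeping `next` and `previous` out of serialized output so
 * token trees stay printable.
 *
 * @param {TokenType} type - Token type
 * @param {TokenInfo} info - Token data
 * @return {Token} New token
 */
function token(type: TokenType, info: TokenInfo): Token {
  return Object.defineProperties(u(type, info), {
    next: { enumerable: false, writable: true },
    previous: { enumerable: false, writable: true }
  })
}

// hypothetical usage: pass the factory via options
const events = tokenize('hello', { token })
console.log(events)
```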
  /**
   * Create a new file tokenizer.
   *
   * @see {@linkcode Options}
-   * @see {@linkcode VFile}
-   * @see {@linkcode Value}
   *
-   * @param {Value | VFile | null | undefined} file - File to tokenize
-   * @param {Options} options - Tokenization options
+   * @param {Options | null | undefined} [options] - Tokenize options
   */
-  constructor(file: Value | VFile | null | undefined, options: Options) {
-    assert(options.token, 'expected token factory')
-
-    this.debug = debug(options.debug ?? 'vfile-lexer')
-    this.disabled = Object.freeze(options.disabled ?? [])
-    this.reader = new Reader(file, options.from)
-    this.token = options.token
+  constructor(options?: Options | null | undefined) {
+    super(null, options?.from)
+    this.place._index = 0
+
+    const {
+      constructs,
+      debug: debugName,
+      disabled,
+      eol,
+      finalizeContext,
+      initialize: initializer,
+      preprocess: preprocessor,
+      resolvers,
+      token
+    } = options ?? {}
+
+    this.debug = debug(debugName ?? 'vfile-lexer')
+    this.disabled = Object.freeze(disabled ?? [])
+    this.initialize = initializer ?? initialize(constructs ?? {})
+    this.preprocess = preprocessor ?? preprocess()
+    this.resolvers = resolvers ? [...resolvers] : []
+    this.token = token ?? function token(
+      type: TokenType,
+      info: TokenInfo
+    ): Token {
+      return Object.defineProperties(u(type, info), {
+        next: { enumerable: false, writable: true },
+        previous: { enumerable: false, writable: true }
+      })
+    }
 
-    this.code = this.reader.read()
-    this.consumed = true
-    this.eof = false
+    assert(typeof this.token === 'function', 'expected token factory')
+    this.#code = codes.eof
+    this.#consumed = true
+    this.chunks = []
+    this.eol = eol ?? isLineEnding
     this.events = []
-    this.initialize = options.initialize ?? initialize(options.constructs ?? [])
-    this.lastConstruct = null
+    this.lastConstruct = undefined
     this.lastEvent = 0
-    this.lastIndex = 0
-    this.lastToken = null
+    this.lastPlace = this.now()
+    this.lastStack = []
     this.resolveAll = []
+    this.skips = {}
+    this.stack = []
 
    /**
     * Base context object.
     *
     * @const {TokenizeContext} context
     */
    const context: TokenizeContext = Object.defineProperties({
-      check: this.reader.check.bind(this.reader),
-      code: this.code,
+      code: codes.eof,
      currentConstruct: this.lastConstruct,
-      disabled: this.disabled,
+      defineSkip: this.defineSkip.bind(this),
      events: this.events,
-      includes: this.reader.includes.bind(this.reader),
-      next: this.reader.peek(),
+      next: codes.eof,
      now: this.now.bind(this),
-      peek: this.reader.peek.bind(this.reader),
-      previous: this.reader.previous,
-      serialize: this.reader.serialize.bind(this.reader),
-      slice: this.reader.slice.bind(this.reader),
-      sliceSerialize: this.reader.sliceSerialize.bind(this.reader),
-      token: this.tail
+      previous: codes.eof,
+      sliceSerialize: this.sliceSerialize.bind(this),
+      sliceStream: this.sliceStream.bind(this),
+      write: this.write.bind(this)
    }, {
      /* c8 ignore next 6 */
-      code: { configurable: false, get: (): Code => this.reader.output },
-      next: { configurable: false, get: (): Code => this.reader.peek() },
-      previous: { configurable: false, get: (): Code => this.reader.previous },
-      token: {
-        configurable: false,
-        get: (): Readonly<Token> => Object.freeze(Object.assign({}, this.tail))
-      }
+      code: { configurable: false, get: (): Code => this.code },
+      next: { configurable: false, get: (): Code => this.next },
+      previous: { configurable: false, get: (): Code => this.previous }
    })
 
-    this.context = options.context?.(context) ?? context
-
    this.effects = {
      attempt: this.constructFactory(this.resolve.bind(this)),
      check: this.constructFactory(this.restore.bind(this)),
@@ -315,26 +379,72 @@ class Lexer {
      interrupt: this.constructFactory(this.restore.bind(this), true)
    }
 
-    if (this.initialize.resolveAll) this.resolveAll.push(this.initialize)
+    this.context = context
+    this.context = finalizeContext?.call(this, this.context) ?? this.context
+
+    this.state = this.initialize.tokenize.call(this.context, this.effects)
+    this.initialize.resolveAll && this.resolveAll.push(this.initialize)
  }

  /**
-   * Create a new file tokenizer for `file`.
+   * Get the current character code without changing the position of the
+   * reader.
   *
-   * @see {@linkcode Options}
-   * @see {@linkcode VFile}
-   * @see {@linkcode Value}
+   * > 👉 Equivalent to `this.peek(0)`.
+   *
+   * @see {@linkcode Code}
   *
   * @public
-   * @static
+   * @instance
   *
-   * @param {Value | VFile} file - File to tokenize
-   * @param {Options} options - Tokenization options
-   * @return {Lexer} New lexer instance
+   * @return {Code} Current character code
   */
-  public static create(file: Value | VFile, options: Options): Lexer {
-    return new Lexer(file, options)
+  public get code(): Code {
+    return this.peek(0)
+  }
+
+  /**
+   * Check if end of stream has been reached.
+ * + * @public + * @instance + * + * @return {boolean} `true` if at end of stream, `false` otherwise + */ + public get eos(): boolean { + return this.chunks[this.chunks.length - 1] === codes.eof + } + + /** + * Get the next character code without changing the position of the reader. + * + * > 👉 Equivalent to `this.peek()`. + * + * @see {@linkcode Code} * * @public - * @static + * @instance * - * @param {Value | VFile} file - File to tokenize - * @param {Options} options - Tokenization options - * @return {Lexer} New lexer instance + * @return {Code} Next character code */ - public static create(file: Value | VFile, options: Options): Lexer { - return new Lexer(file, options) + public get next(): Code { + return this.peek() + } + + /** + * Get the previous character code without changing the position of the + * reader. + * + * > 👉 Equivalent to `this.peek(-1)`. + * + * @see {@linkcode Code} + * + * @public + * @instance + * + * @return {Code} Previous character code + */ + public get previous(): Code { + return this.peek(-1) } /** @@ -348,7 +458,7 @@ class Lexer { * * @param {ReturnHandle} onreturn - Successful construct callback * @param {boolean | null | undefined} [interrupt] - Interrupting? - * @return {Attempt} attempt/check/interrupt state + * @return {Attempt} attempt/check/interrupt */ protected constructFactory( onreturn: ReturnHandle, @@ -361,7 +471,18 @@ class Lexer { */ const self: this = this - return function hook( + return hook + + /** + * Handle either an object mapping codes to constructs, a list of + * constructs, or a single construct. + * + * @param {Constructs} construct - Constructs to try + * @param {State | undefined} [succ] - Successful tokenization state + * @param {State | undefined} [fail] - Failed tokenization state + * @return {State} Next state + */ + function hook( construct: Constructs, succ: State = /* c8 ignore next */ () => undefined, fail?: State @@ -383,22 +504,24 @@ class Lexer { /** * Construct list. * - * @var {ReadonlyArray} list + * @var {Construct[]} list */ - let list: readonly Construct[] + let list: Construct[] - // handle a single construct, list of constructs, or map of constructs - return 'tokenize' in construct || Array.isArray(construct) - ? handleConstructList([construct].flat()) - : handleConstructMap(construct) + // handle list of constructs, single construct, or map of constructs + return Array.isArray(construct) + ? handleConstructList(construct) + : 'tokenize' in construct + ? handleConstructList([construct]) + : handleConstructRecord(construct) /** * Handle a list of constructs. * - * @param {ReadonlyArray} constructs - Constructs to try + * @param {Construct[]} constructs - Constructs to try * @return {State} Next state */ - function handleConstructList(constructs: readonly Construct[]): State { + function handleConstructList(constructs: Construct[]): State { list = constructs j = 0 @@ -416,30 +539,27 @@ class Lexer { * @param {ConstructRecord} map - Constructs to try * @return {State} Next state */ - function handleConstructMap(map: ConstructRecord): State { - return start + function handleConstructRecord(map: ConstructRecord): State { + return run /** * Check if `value` looks like a construct, or list of constructs. 
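* * > 👉 e.g. a sketch based on the check above: `is({ tokenize })` and `is([construct])` are `true`, while `is(false)` and `is(undefined)` are `false`.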
* * @param {unknown} value - Value to check - * @return {value is Construct | ReadonlyArray} `true` if - * value is an object + * @return {value is ConstructPack} `true` if value is an object */ - function is(value: unknown): value is Construct | readonly Construct[] { + function is(value: unknown): value is ConstructPack { return typeof value === 'object' } /** - * Start construct tokenization. - * * @param {Code} code - Current character code * @return {State | undefined} Next state */ - function start(code: Code): State | undefined { + function run(code: Code): State | undefined { return handleConstructList([ ...[code !== null && map[code]].flat().filter(value => is(value)), - ...[code !== null && map.null].flat().filter(value => is(value)) + ...[map.null].flat().filter(value => is(value)) ])(code) } } @@ -454,8 +574,6 @@ class Lexer { return start /** - * Start construct tokenization. - * * @param {Code} code - Current character code * @return {State | undefined} Next state */ @@ -463,12 +581,10 @@ class Lexer { const { context, disabled, effects } = self const { name, partial, previous, test, tokenize } = construct + self.store() currentConstruct = construct - if (!partial) context.currentConstruct = construct - if (fail) self.store() - - context.interrupt = interrupt + context.interrupt = interrupt switch (true) { case !!name && disabled.includes(name): @@ -488,10 +604,10 @@ class Lexer { * @return {State} Next state */ function ok(code: Code): State { - assert(code === self.code, 'expected `code` to equal expected code') + assert(code === self.#code, 'expected `code` to equal expected code') self.debug('ok: `%o`', code) - self.consumed = true + self.#consumed = true onreturn(currentConstruct) return succ @@ -505,10 +621,10 @@ class Lexer { */ function nok(code: Code): State | undefined { assert(list, 'expected construct list') - assert(code === self.code, 'expected `code` to equal expected code') + assert(code === self.#code, 'expected `code` to equal expected code') self.debug('nok: `%o`', code) - self.consumed = true + self.#consumed = true self.restore() return ++j < list.length ? handleConstruct(list[j]!) : fail @@ -528,13 +644,37 @@ class Lexer { * @return {undefined} Nothing */ protected consume(code: Code): undefined { - assert(code === this.code, 'expected `code` to equal expected code') + assert(code === this.#code, 'expected `code` to equal expected code') this.debug('consume: `%o`', code) - assert(this.consumed === null, 'expected unconsumed code') - code !== codes.eof ? this.reader.read() : (this.eof = true) - return void (this.consumed = true) + assert(this.#consumed === null, 'expected unconsumed code') + this.read() + this.#consumed = true + return void code + } + + /* c8 ignore start */ + + /** + * Define a skip. + * + * @see {@linkcode DefineSkip} + * @see {@linkcode Point} + * + * @todo test + * + * @public + * @instance + * + * @param {Pick} point - Skip point + * @return {undefined} Nothing + */ + public defineSkip(point: Pick): undefined { + this.skips[point.line] = point.column + return this.skip(), void this.debug('position: define skip: `%j`', point) } + /* c8 ignore stop */ + /** * Start a new token. 
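* * > 👉 e.g. a sketch (`tt.typeMetadata` is a fixture token type): a tokenizer calls `effects.enter(tt.typeMetadata)`, consumes one or more codes, then `effects.exit(tt.typeMetadata)`, pushing balanced enter/exit events.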
* @@ -546,12 +686,12 @@ class Lexer { * @instance * * @param {TokenType} type - Token type - * @param {(Partial | null)?} fields - Token fields + * @param {TokenFields | null | undefined} [fields] - Token fields * @return {Token} Open token */ protected enter( type: TokenType, - fields?: Partial | null + fields?: TokenFields | null | undefined ): Token { /** * New token. @@ -560,21 +700,16 @@ class Lexer { */ const token: Token = this.token(type, { ...fields, - end: this.reader.point(-1), - start: this.now() + start: this.now(), // eslint-disable-next-line sort-keys + end: this.now() }) - // shift/replace/init tail - if ((this.head)) { - token.previous = this.tail - this.tail.next = token - this.tail = this.tail.next - } else { - this.head = this.tail = token - } - + assert(typeof type === 'string', 'expected `type` to be a string') + assert(type.length > 0, 'expected `type` to be a non-empty string') this.debug('enter: `%s`; %o', type, token.start) + this.events.push([ev.enter, token, this.context]) + this.stack.push(token) return token } @@ -594,28 +729,13 @@ class Lexer { protected exit(type: TokenType): Token { assert(typeof type === 'string', 'expected `type` to be a string') assert(type.length > 0, 'expected `type` to be a non-empty string') - assert(this.events.length, 'expected events') /** * Token to close. * - * @var {Token | undefined} token + * @const {Token | undefined} token */ - let token: Token | undefined = this.tail - - // find open token - while (token) { - if ( - !!token.start.column && - !!token.start.line && - token.start.offset >= 0 && - token.end.column + token.end.line + token.end.offset === -3 - ) { - break - } - - token = token.previous - } + const token: Token | undefined = this.stack.pop() assert(token, 'cannot exit without open token') assert(type === token.type, 'expected exit token to match current token') @@ -641,10 +761,10 @@ class Lexer { * @return {undefined} Nothing */ protected go(code: Code): undefined { - assert(this.consumed, `expected code \`${code}\` to be consumed`) - this.consumed = null + assert(this.#consumed, `expected code \`${code}\` to be consumed`) + this.#consumed = null this.debug('go: `%o`, %j', code, /* c8 ignore next */ this.state?.name) - this.code = code + this.#code = code assert(typeof this.state === 'function', 'expected state function') this.state = this.state(code) return void code @@ -653,15 +773,70 @@ class Lexer { /** * Get the current point in the file. * - * @see {@linkcode Point} + * @see {@linkcode Place} + * + * @public + * @instance + * + * @return {Place} Current point in file, relative to {@linkcode start} + */ + public now(): Place { + const { _index, column, line, offset } = this.place + // eslint-disable-next-line sort-keys + return { line, column, offset, _index } + } + + /** + * Get the next `k`-th character code from the file without changing the + * position of the reader. + * + * @see {@linkcode Code} + * + * @public + * @instance + * + * @param {number?} [k=1] - Difference between index of next `k`-th character + * code and index of current character code + * @return {Code} Peeked character code + */ + public peek(k: number = 1): Code { + return this.chunks[this.place._index + k] ?? codes.eof + } + + /** + * Get the next character code. + * + * Unlike {@linkcode peek}, this method changes the position of the reader. 
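* * > 👉 e.g. a sketch: with `chunks` `[104, 105]` and `place._index` `0`, `peek()` returns `105` and leaves the index untouched, while `read()` returns `105` and advances `place._index` to `1`.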
+ * + * @see {@linkcode Code} * * @protected * @instance * - * @return {Point} Current point in file + * @return {Code} Next character code */ - protected now(): Point { - return this.reader.now() + protected read(): Code { + /** + * Current character code. + * + * @const {Code} code + */ + const code: Code = this.code + + if (this.eol(code)) { + this.place.column = 1 + this.place.line++ + this.place.offset += code === codes.crlf ? 2 : 1 + this.skip() + this.debug('position after eol: %o', this.place) + } else if (code !== codes.vs && code !== codes.eof) { + this.place.column++ + this.place.offset++ + } else if (code === codes.vs && this.previous === codes.vht) { + this.place.column++ + } + + return this.chunks[++this.place._index] ?? codes.eof } /** @@ -686,8 +861,6 @@ class Lexer { } if (construct.resolve) { - assert(lastEvent >= 0, 'expected last event index') - splice( this.events, lastEvent, @@ -702,8 +875,8 @@ class Lexer { assert( /* c8 ignore next 3 */ !!construct.partial || - !this.context.events.length || - this.context.events[this.context.events.length - 1]![0] === ev.exit, + !this.events.length || + this.events[this.events.length - 1]![0] === ev.exit, 'expected last token to end' ) @@ -711,7 +884,8 @@ class Lexer { } /** - * Restore the last construct, event index, reader position, and tail token. + * Restore the last construct, event index, location, tail token, and token + * stack. * * @protected * @instance @@ -719,22 +893,117 @@ class Lexer { * @return {undefined} Nothing */ protected restore(): undefined { - assert(this.lastEvent >= 0, 'expected last event index') - assert(this.lastIndex >= 0, 'expected last reader position') - assert(this.lastToken, 'expected last token') - - this.reader.read(this.lastIndex - this.reader.index) this.context.currentConstruct = this.lastConstruct this.events.length = this.lastEvent - this.tail = this.lastToken - this.tail.next = undefined + this.place = { ...this.lastPlace } + this.stack = [...this.lastStack] + return void this.debug('restore: %o', this.now()) + } - this.debug('restore: %o', this.now()) - return void this + /** + * Move the current point a bit forward in the line when on a column skip. + * + * @todo test + * + * @protected + * @instance + * + * @return {undefined} Nothing + */ + protected skip(): undefined { + /* c8 ignore next 4 */ + if (this.place.line in this.skips && this.place.column < 2) { + this.place.column = this.skips[this.place.line]! + this.place.offset += this.place.column - 1 + } + + return void this.place + } + + /** + * Get the text spanning `range` without changing the position of the reader. + * + * @see {@linkcode Position} + * + * @public + * @instance + * + * @param {Position} range - Position in stream + * @param {boolean | null | undefined} [expandTabs] - Expand tabs? + * @return {string} Serialized slice + */ + public sliceSerialize( + range: Position, + expandTabs?: boolean | null | undefined + ): string { + /** + * Character code slice. + * + * @const {Code[]} slice + */ + const slice: Code[] = this.sliceStream(range) + + /** + * Serialized character code array. + * + * @const {string[]} result + */ + const result: string[] = [] + + /** + * Current code represents horizontal tab? + * + * @var {boolean} tab + */ + let tab: boolean = false + + for (const code of slice) { + switch (code) { + case codes.crlf: + result.push(chars.crlf) + break + case codes.vcr: + result.push(chars.cr) + break + case codes.vht: + result.push(expandTabs ? 
chars.space : chars.ht) + break + case codes.vlf: + result.push(chars.lf) + break + case codes.vs: + if (!expandTabs && tab) continue + result.push(chars.space) + break + default: + result.push(String.fromCodePoint(code!)) + } + + tab = code === codes.vht + } + + return result.join(chars.empty) + } + + /** + * Get the chunks spanning `range`. + * + * @see {@linkcode Code} + * @see {@linkcode Position} + * + * @public + * @instance + * + * @param {Position} range - Position in stream + * @return {Code[]} List of chunks + */ + public sliceStream(range: Position): Code[] { + return this.chunks.slice(range.start._index, range.end._index) } /** - * Store the current construct, event index reader position, and tail token. + * Store the current construct, event index, location, tail token, and token + * stack. * * @protected * @instance @@ -744,27 +1013,74 @@ class Lexer { protected store(): undefined { this.lastConstruct = this.context.currentConstruct this.lastEvent = this.events.length - this.lastIndex = this.reader.index - this.lastToken = this.tail - + this.lastPlace = this.now() + this.lastStack = [...this.stack] return void this } /** - * Tokenize the file. + * Main loop to walk through {@linkcode chunks}. * - * @public + * > 👉 The {@linkcode read} method modifies `_index` in {@linkcode place} to + * > advance the loop until end of stream. + * + * @protected * @instance * * @return {this} `this` lexer */ - public tokenize(): this { - while (!this.eof) this.go(this.reader.output) + protected tokenize(): this { + while (this.place._index < this.chunks.length) this.go(this.code) + this.eos && this.state && this.go(this.code) + return this + } - this.resolve(this.initialize, 0) - this.events = resolveAll(this.resolveAll, this.events, this.context) + /** + * Write a slice of chunks. + * + * The eof code (`null`) can be used to signal end of stream. + * + * @see {@linkcode Chunk} + * @see {@linkcode Event} + * + * @public + * @instance + * + * @param {Chunk[]} slice - Chunks + * @return {Event[]} List of events + */ + public write(slice: Chunk[]): Event[] { + /** + * New chunks. + * + * @const {Code[]} chunks + */ + const chunks: Code[] = slice.flatMap(chunk => { + /* c8 ignore next 2 */ return typeof chunk === 'string' + ? this.preprocess(chunk) + : chunk + }) - return this + this.chunks = push(this.chunks, chunks) + this.tokenize() + + // exit if not done, resolvers might change stuff + /* c8 ignore next */ if (!this.eos) return [] + + /** + * Constructs with `resolveAll` handlers. + * + * > 👉 Includes partial constructs. + * + * @const {Partial[]} constructs + */ + const constructs: Partial[] = [ + ...this.resolveAll, + ...this.resolvers.map(resolveAll => ({ resolveAll })) + ] + + this.resolve(this.initialize, 0) + return this.events = resolveAll(constructs, this.events, this.context) } } diff --git a/src/preprocess.ts b/src/preprocess.ts new file mode 100644 index 0000000..fca6271 --- /dev/null +++ b/src/preprocess.ts @@ -0,0 +1,136 @@ +/** + * @file preprocess + * @module vfile-lexer/preprocess + */ + +import { codes } from './enums' +import type { PreprocessOptions } from './interfaces' +import type { + Code, + Column, + Encoding, + FileLike, + Preprocessor, + Value +} from './types' + +/** + * Create a preprocessor to turn a value into character code chunks. 
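* * > 👉 e.g. a sketch with the default `tabSize` of `2`: `preprocess()('\ta\n', null, true)` yields `[codes.vht, codes.vs, 97, codes.vlf, codes.eof]`.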
+ * + * @see {@linkcode PreprocessOptions} + * @see {@linkcode Preprocessor} + * + * @param {PreprocessOptions | null | undefined} [options] - Configuration + * @return {Preprocessor} Character code preprocessor + */ +function preprocess( + this: void, + options?: PreprocessOptions | null | undefined +): Preprocessor { + const { tabSize = 2 } = options ?? {} + return preprocessor + + /** + * Turn `value` into character code chunks. + * + * @param {FileLike | Value | null | undefined} value - Value to preprocess + * @param {Encoding | null | undefined} [encoding] - Character encoding to use + * when value or its contents is {@linkcode Uint8Array} + * @param {boolean | null | undefined} [end] - End of stream? + * @return {Code[]} Character code chunks + */ + function preprocessor( + value: FileLike | Value | null | undefined, + encoding?: Encoding | null | undefined, + end?: boolean | null | undefined + ): Code[] { + /** + * Character code chunks. + * + * @const {Code[]} chunks + */ + const chunks: Code[] = [] + + if ( + (typeof value === 'string' && value) || + (typeof value === 'object' && value) + ) { + value = typeof value === 'object' && 'value' in value + ? value.value + : value + + value = typeof value === 'string' + ? value.toString() + : new TextDecoder(encoding ?? undefined).decode(value) + + /** + * Current column. + * + * @var {Column} column + */ + let column: Column = 1 + + /** + * Index of current character code. + * + * @var {number} index + */ + let index: number = 0 + + while (index < value.length) { + /** + * Character code. + * + * @var {NonNullable} code + */ + let code: NonNullable = value[index]!.codePointAt(0)! + + /** + * Difference between next column and current column. + * + * @var {number} k + */ + let k: number = 1 + + switch (true) { + case code === codes.cr: + if (value[index + 1]?.codePointAt(0) === codes.lf) { + chunks.push(codes.crlf) + k++ + } else { + chunks.push(codes.vcr) + } + + column = 1 + break + case code === codes.ht: + /** + * Next column. + * + * @const {number} n + */ + const n: number = Math.ceil(column / tabSize) * tabSize + + chunks.push(codes.vht) + while (column++ < n) chunks.push(codes.vs) + + break + case code === codes.lf: + chunks.push(codes.vlf) + column = 1 + break + default: + chunks.push(code) + column++ + break + } + + index += k + } + } + + return end && chunks.push(codes.eof), chunks + } +} + +export default preprocess diff --git a/src/tokenize.ts b/src/tokenize.ts index b6032d3..efb0cce 100644 --- a/src/tokenize.ts +++ b/src/tokenize.ts @@ -3,24 +3,80 @@ * @module vfile-lexer/tokenize */ -import type { Options } from '#src/interfaces' -import type { VFile, Value } from 'vfile' +import type { + Encoding, + Event, + FileLike, + TokenizeOptions, + Value +} from '#src/types' import Lexer from './lexer' +import preprocess from './preprocess' /** - * Tokenize `file`. + * Tokenize `value`. 
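* * > 👉 e.g. a sketch (`file` hypothetical): `tokenize('hi')`, `tokenize(file, 'utf8')`, and `tokenize('\tcode', { tabSize: 4 })` each return the full list of events.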
* - * @see {@linkcode Lexer} - * @see {@linkcode Options} - * @see {@linkcode VFile} + * @see {@linkcode Encoding} + * @see {@linkcode Event} + * @see {@linkcode FileLike} + * @see {@linkcode TokenizeOptions} * @see {@linkcode Value} * - * @param {Value | VFile} file - File to tokenize - * @param {Options} options - Lexer options - * @return {Lexer} Lexer instance + * @param {FileLike | Value | null | undefined} value - Value to tokenize + * @param {Encoding | null | undefined} [encoding] - Character encoding to use + * when `value` or its contents is {@linkcode Uint8Array} + * @param {TokenizeOptions | null | undefined} [options] - Configuration options + * @return {Event[]} List of events */ -function tokenize(file: Value | VFile, options: Options): Lexer { - return Lexer.create(file, options).tokenize() +function tokenize( + value: FileLike | Value | null | undefined, + encoding?: Encoding | null | undefined, + options?: TokenizeOptions | null | undefined +): Event[] + +/** + * Tokenize `value`. + * + * @see {@linkcode FileLike} + * @see {@linkcode Event} + * @see {@linkcode TokenizeOptions} + * @see {@linkcode Value} + * + * @param {FileLike | Value | null | undefined} value - Value to tokenize + * @param {TokenizeOptions | null | undefined} [options] - Tokenize options + * @return {Event[]} List of events + */ +function tokenize( + value: FileLike | Value | null | undefined, + options?: TokenizeOptions | null | undefined +): Event[] + +/** + * Tokenize `value`. + * + * @see {@linkcode FileLike} + * @see {@linkcode Event} + * @see {@linkcode TokenizeOptions} + * @see {@linkcode Value} + * + * @param {FileLike | Value | null | undefined} value - Value to tokenize + * @param {Encoding | TokenizeOptions | null | undefined} [encoding] - Character + * encoding to use when `value` or its contents is {@linkcode Uint8Array}, or + * configuration options + * @param {TokenizeOptions | null | undefined} [options] - Configuration options + * @return {Event[]} List of events + */ +function tokenize( + value: FileLike | Value | null | undefined, + encoding?: Encoding | TokenizeOptions | null | undefined, + options?: TokenizeOptions | null | undefined +): Event[] { + if (typeof encoding === 'object' && encoding) { + options = encoding + encoding = undefined + } + + return new Lexer(options).write(preprocess(options)(value, encoding, true)) } export default tokenize diff --git a/src/types/__tests__/chunk.spec-d.ts b/src/types/__tests__/chunk.spec-d.ts new file mode 100644 index 0000000..ab5ffb9 --- /dev/null +++ b/src/types/__tests__/chunk.spec-d.ts @@ -0,0 +1,17 @@ +/** + * @file Type Tests - Chunk + * @module vfile-lexer/types/tests/unit-d/Chunk + */ + +import type TestSubject from '../chunk' +import type Code from '../code' + +describe('unit-d:types/Chunk', () => { + it('should extract Code', () => { + expectTypeOf().extract().not.toBeNever() + }) + + it('should extract string', () => { + expectTypeOf().extract().not.toBeNever() + }) +}) diff --git a/src/types/__tests__/code-check.spec-d.ts b/src/types/__tests__/code-check.spec-d.ts new file mode 100644 index 0000000..45a7859 --- /dev/null +++ b/src/types/__tests__/code-check.spec-d.ts @@ -0,0 +1,21 @@ +/** + * @file Type Tests - CodeCheck + * @module vfile-reader/types/tests/unit-d/CodeCheck + */ + +import type Code from '../code' +import type TestSubject from '../code-check' + +describe('unit-d:types/CodeCheck', () => { + describe('parameters', () => { + it('should be callable with [Code]', () => { + 
expectTypeOf().parameters.toEqualTypeOf<[Code]>() + }) + }) + + describe('returns', () => { + it('should return boolean', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/__tests__/code.spec-d.ts b/src/types/__tests__/code.spec-d.ts new file mode 100644 index 0000000..eac6059 --- /dev/null +++ b/src/types/__tests__/code.spec-d.ts @@ -0,0 +1,16 @@ +/** + * @file Type Tests - Code + * @module vfile-lexer/types/tests/unit-d/Code + */ + +import type TestSubject from '../code' + +describe('unit-d:types/Code', () => { + it('should extract null', () => { + expectTypeOf().extract().not.toBeNever() + }) + + it('should extract number', () => { + expectTypeOf().extract().not.toBeNever() + }) +}) diff --git a/src/types/__tests__/construct-pack.spec-d.ts b/src/types/__tests__/construct-pack.spec-d.ts new file mode 100644 index 0000000..6e00b43 --- /dev/null +++ b/src/types/__tests__/construct-pack.spec-d.ts @@ -0,0 +1,17 @@ +/** + * @file Type Tests - ConstructPack + * @module vfile-lexer/types/tests/unit-d/ConstructPack + */ + +import type { Construct } from '#src/interfaces' +import type TestSubject from '../construct-pack' + +describe('unit-d:types/ConstructPack', () => { + it('should extract Construct', () => { + expectTypeOf().extract().not.toBeNever() + }) + + it('should extract Construct[]', () => { + expectTypeOf().extract().not.toBeNever() + }) +}) diff --git a/src/types/__tests__/construct-record.spec-d.ts b/src/types/__tests__/construct-record.spec-d.ts deleted file mode 100644 index 5b8cdb4..0000000 --- a/src/types/__tests__/construct-record.spec-d.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * @file Type Tests - ConstructRecord - * @module vfile-lexer/types/tests/unit-d/ConstructRecord - */ - -import type { Nilable } from '@flex-development/tutils' -import { codes } from '@flex-development/vfile-reader' -import type TestSubject from '../construct-record' -import type RecordConstructs from '../constructs-record' - -describe('unit-d:types/ConstructRecord', () => { - type Value = Nilable - - it('should match [[x: `${number}`]: RecordConstructs | null | undefined]', () => { - expectTypeOf() - .toHaveProperty(`${codes.backtick}`) - .toEqualTypeOf() - }) - - it('should match [[x: "null"]: RecordConstructs | null | undefined]', () => { - expectTypeOf() - .toHaveProperty(`${codes.eof}`) - .toEqualTypeOf() - }) - - it('should match [[x: number]: RecordConstructs | null | undefined]', () => { - expectTypeOf() - .toHaveProperty(codes.at) - .toEqualTypeOf() - }) -}) diff --git a/src/types/__tests__/constructs-record.spec-d.ts b/src/types/__tests__/constructs-record.spec-d.ts deleted file mode 100644 index 05b7840..0000000 --- a/src/types/__tests__/constructs-record.spec-d.ts +++ /dev/null @@ -1,21 +0,0 @@ -/** - * @file Type Tests - RecordConstructs - * @module vfile-lexer/types/tests/unit-d/RecordConstructs - */ - -import type { Construct } from '#src/interfaces' -import type TestSubject from '../constructs-record' - -describe('unit-d:types/RecordConstructs', () => { - it('should extract Construct', () => { - expectTypeOf().extract().not.toBeNever() - }) - - it('should extract Construct[]', () => { - expectTypeOf().extract().not.toBeNever() - }) - - it('should extract readonly Construct[]', () => { - expectTypeOf().extract().not.toBeNever() - }) -}) diff --git a/src/types/__tests__/constructs.spec-d.ts b/src/types/__tests__/constructs.spec-d.ts index 6185033..84a6888 100644 --- a/src/types/__tests__/constructs.spec-d.ts +++ b/src/types/__tests__/constructs.spec-d.ts 
@@ -3,16 +3,16 @@ * @module vfile-lexer/types/tests/unit-d/Constructs */ -import type ConstructRecord from '../construct-record' +import type { ConstructRecord } from '#src/interfaces' +import type ConstructPack from '../construct-pack' import type TestSubject from '../constructs' -import type RecordConstructs from '../constructs-record' describe('unit-d:types/Constructs', () => { it('should extract ConstructRecord', () => { expectTypeOf<TestSubject>().extract<ConstructRecord>().not.toBeNever() }) - it('should extract RecordConstructs', () => { - expectTypeOf<TestSubject>().extract<RecordConstructs>().not.toBeNever() + it('should extract ConstructPack', () => { + expectTypeOf<TestSubject>().extract<ConstructPack>().not.toBeNever() }) }) diff --git a/src/types/__tests__/consume.spec-d.ts index 2634685..edfb494 100644 --- a/src/types/__tests__/consume.spec-d.ts +++ b/src/types/__tests__/consume.spec-d.ts @@ -3,7 +3,7 @@ * @module vfile-lexer/types/tests/unit-d/Consume */ -import type { Code } from '@flex-development/vfile-reader' +import type Code from '../code' import type TestSubject from '../consume' describe('unit-d:types/Consume', () => { diff --git a/src/types/__tests__/define-skip.spec-d.ts new file mode 100644 index 0000000..aa1f160 --- /dev/null +++ b/src/types/__tests__/define-skip.spec-d.ts @@ -0,0 +1,25 @@ +/** + * @file Type Tests - DefineSkip + * @module vfile-lexer/types/tests/unit-d/DefineSkip + */ + +import type { Point } from '#src/interfaces' +import type TestSubject from '../define-skip' + +describe('unit-d:types/DefineSkip', () => { + describe('parameters', () => { + it('should be callable with [Pick<Point, 'column' | 'line'>]', () => { + // Arrange + type P = [Pick<Point, 'column' | 'line'>] + + // Expect + expectTypeOf<TestSubject>().parameters.toEqualTypeOf
<P>
() + }) + }) + + describe('returns', () => { + it('should return undefined', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/__tests__/encoding.spec-d.ts b/src/types/__tests__/encoding.spec-d.ts new file mode 100644 index 0000000..2773bb9 --- /dev/null +++ b/src/types/__tests__/encoding.spec-d.ts @@ -0,0 +1,32 @@ +/** + * @file Type Tests - Encoding + * @module vfile-lexer/types/tests/unit-d/Encoding + */ + +import type TestSubject from '../encoding' + +describe('unit-d:types/Encoding', () => { + it('should extract "unicode-1-1-utf-8"', () => { + expectTypeOf().extract<'unicode-1-1-utf-8'>().not.toBeNever() + }) + + it('should extract "utf-16be"', () => { + expectTypeOf().extract<'utf-16be'>().not.toBeNever() + }) + + it('should extract "utf-16le"', () => { + expectTypeOf().extract<'utf-16le'>().not.toBeNever() + }) + + it('should extract "utf-8"', () => { + expectTypeOf().extract<'utf-8'>().not.toBeNever() + }) + + it('should extract "utf16"', () => { + expectTypeOf().extract<'utf16'>().not.toBeNever() + }) + + it('should extract "utf8"', () => { + expectTypeOf().extract<'utf8'>().not.toBeNever() + }) +}) diff --git a/src/types/__tests__/enter.spec-d.ts b/src/types/__tests__/enter.spec-d.ts index 3b4a1d5..4850d43 100644 --- a/src/types/__tests__/enter.spec-d.ts +++ b/src/types/__tests__/enter.spec-d.ts @@ -3,16 +3,15 @@ * @module vfile-lexer/types/tests/unit-d/Enter */ -import type { Token } from '#src/interfaces' +import type { Token, TokenFields } from '#src/interfaces' import type TestSubject from '../enter' -import type TokenFields from '../token-fields' import type TokenType from '../token-type' describe('unit-d:types/Enter', () => { describe('parameters', () => { - it('should be callable with [TokenType, (Partial | null | undefined)?]', () => { + it('should be callable with [TokenType, (TokenFields | null | undefined)?]', () => { // Arrange - type P = [TokenType, (Partial | null | undefined)?] + type P = [TokenType, (TokenFields | null | undefined)?] // Expect expectTypeOf().parameters.toEqualTypeOf
<P>
() diff --git a/src/types/__tests__/event-type.spec-d.ts b/src/types/__tests__/event-type.spec-d.ts index 29f13b8..4c18096 100644 --- a/src/types/__tests__/event-type.spec-d.ts +++ b/src/types/__tests__/event-type.spec-d.ts @@ -7,10 +7,6 @@ import type { ev } from '#src/enums' import type TestSubject from '../event-type' describe('unit-d:types/EventType', () => { - it('should extract ev', () => { - expectTypeOf().extract().not.toBeNever() - }) - it('should extract keyof typeof ev', () => { expectTypeOf().extract().not.toBeNever() }) diff --git a/src/types/__tests__/event.spec-d.ts b/src/types/__tests__/event.spec-d.ts index 60d645c..589f119 100644 --- a/src/types/__tests__/event.spec-d.ts +++ b/src/types/__tests__/event.spec-d.ts @@ -3,13 +3,13 @@ * @module vfile-lexer/types/tests/unit-d/Event */ -import type tk from '#fixtures/tk' +import type tt from '#fixtures/tt' import type { Token, TokenizeContext } from '#src/interfaces' import type TestSubject from '../event' import type EventType from '../event-type' describe('unit-d:types/Event', () => { - type T = tk.whitespace + type T = tt.typeMetadata type Subject = TestSubject it('should match [0: EventType]', () => { diff --git a/src/types/__tests__/file-like.spec-d.ts b/src/types/__tests__/file-like.spec-d.ts new file mode 100644 index 0000000..5ea7fe7 --- /dev/null +++ b/src/types/__tests__/file-like.spec-d.ts @@ -0,0 +1,13 @@ +/** + * @file Type Tests - FileLike + * @module vfile-lexer/types/tests/unit-d/FileLike + */ + +import type TestSubject from '../file-like' +import type Value from '../value' + +describe('unit-d:types/FileLike', () => { + it('should match [value: Value]', () => { + expectTypeOf().toHaveProperty('value').toMatchTypeOf() + }) +}) diff --git a/src/types/__tests__/finalize-context.spec-d.ts b/src/types/__tests__/finalize-context.spec-d.ts index 1140a55..02cc688 100644 --- a/src/types/__tests__/finalize-context.spec-d.ts +++ b/src/types/__tests__/finalize-context.spec-d.ts @@ -18,9 +18,9 @@ describe('unit-d:types/FinalizeContext', () => { }) describe('returns', () => { - it('should return TokenizeContext | null | undefined | void', () => { + it('should return TokenizeContext | null | undefined', () => { // Arrange - type Expect = TokenizeContext | null | undefined | void + type Expect = TokenizeContext | null | undefined // Expect expectTypeOf().returns.toEqualTypeOf() diff --git a/src/types/__tests__/guard.spec-d.ts b/src/types/__tests__/guard.spec-d.ts index dfbdf8e..aac623e 100644 --- a/src/types/__tests__/guard.spec-d.ts +++ b/src/types/__tests__/guard.spec-d.ts @@ -4,7 +4,7 @@ */ import type { TokenizeContext } from '#src/interfaces' -import type { Code } from '@flex-development/vfile-reader' +import type Code from '../code' import type TestSubject from '../guard' describe('unit-d:types/Guard', () => { diff --git a/src/types/__tests__/initializer.spec-d.ts b/src/types/__tests__/initializer.spec-d.ts index ec6057a..a792cdb 100644 --- a/src/types/__tests__/initializer.spec-d.ts +++ b/src/types/__tests__/initializer.spec-d.ts @@ -3,8 +3,7 @@ * @module vfile-lexer/types/tests/unit-d/Initializer */ -import type { TokenizeContext } from '#src/interfaces' -import type Effects from '../effects' +import type { Effects, TokenizeContext } from '#src/interfaces' import type TestSubject from '../initializer' import type State from '../state' diff --git a/src/types/__tests__/now.spec-d.ts b/src/types/__tests__/now.spec-d.ts new file mode 100644 index 0000000..47c8ec6 --- /dev/null +++ b/src/types/__tests__/now.spec-d.ts @@ -0,0 
+1,22 @@ +/** + * @file Type Tests - Now + * @module vfile-lexer/types/tests/unit-d/Now + */ + +import type { Place } from '#src/interfaces' +import type { EmptyArray } from '@flex-development/tutils' +import type TestSubject from '../now' + +describe('unit-d:types/Now', () => { + describe('parameters', () => { + it('should be callable with []', () => { + expectTypeOf().parameters.toEqualTypeOf() + }) + }) + + describe('returns', () => { + it('should return Place', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/__tests__/preprocessor.spec-d.ts b/src/types/__tests__/preprocessor.spec-d.ts new file mode 100644 index 0000000..2dea09c --- /dev/null +++ b/src/types/__tests__/preprocessor.spec-d.ts @@ -0,0 +1,32 @@ +/** + * @file Type Tests - Preprocessor + * @module vfile-lexer/types/tests/unit-d/Preprocessor + */ + +import type Code from '../code' +import type Encoding from '../encoding' +import type FileLike from '../file-like' +import type TestSubject from '../preprocessor' +import type Value from '../value' + +describe('unit-d:types/Preprocessor', () => { + describe('parameters', () => { + it('should be callable with [FileLike | Value | null | undefined, (Encoding | null | undefined)?, (boolean | null | undefined)?]', () => { + // Arrange + type P = [ + value: FileLike | Value | null | undefined, + encoding?: Encoding | null | undefined, + end?: boolean | null | undefined + ] + + // Expect + expectTypeOf().parameters.toEqualTypeOf
<P>
() + }) + }) + + describe('returns', () => { + it('should return Code[]', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/__tests__/slice-serialize.spec-d.ts b/src/types/__tests__/slice-serialize.spec-d.ts new file mode 100644 index 0000000..07ed1ac --- /dev/null +++ b/src/types/__tests__/slice-serialize.spec-d.ts @@ -0,0 +1,28 @@ +/** + * @file Type Tests - SliceSerialize + * @module vfile-lexer/types/tests/unit-d/SliceSerialize + */ + +import type { Position } from '#src/interfaces' +import type TestSubject from '../slice-serialize' + +describe('unit-d:types/SliceSerialize', () => { + describe('parameters', () => { + it('should be callable with [Position, (boolean | null | undefined)?]', () => { + // Arrange + type P = [ + range: Position, + expandTabs?: boolean | null | undefined + ] + + // Expect + expectTypeOf().parameters.toEqualTypeOf
<P>
() + }) + }) + + describe('returns', () => { + it('should return string', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/__tests__/slice-stream.spec-d.ts b/src/types/__tests__/slice-stream.spec-d.ts new file mode 100644 index 0000000..2d7a185 --- /dev/null +++ b/src/types/__tests__/slice-stream.spec-d.ts @@ -0,0 +1,22 @@ +/** + * @file Type Tests - SliceStream + * @module vfile-lexer/types/tests/unit-d/SliceStream + */ + +import type { Position } from '#src/interfaces' +import type Code from '../code' +import type TestSubject from '../slice-stream' + +describe('unit-d:types/SliceStream', () => { + describe('parameters', () => { + it('should be callable with [Position]', () => { + expectTypeOf().parameters.toEqualTypeOf<[Position]>() + }) + }) + + describe('returns', () => { + it('should return Code[]', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/__tests__/state.spec-d.ts b/src/types/__tests__/state.spec-d.ts index 8291f11..a7f5d4e 100644 --- a/src/types/__tests__/state.spec-d.ts +++ b/src/types/__tests__/state.spec-d.ts @@ -4,7 +4,7 @@ */ import type { Optional } from '@flex-development/tutils' -import type { Code } from '@flex-development/vfile-reader' +import type Code from '../code' import type TestSubject from '../state' describe('unit-d:types/State', () => { diff --git a/src/types/__tests__/token-factory.spec-d.ts b/src/types/__tests__/token-factory.spec-d.ts index 8e616e6..89c7f70 100644 --- a/src/types/__tests__/token-factory.spec-d.ts +++ b/src/types/__tests__/token-factory.spec-d.ts @@ -3,16 +3,15 @@ * @module vfile-lexer/types/tests/unit-d/TokenFactory */ -import type { Token } from '#src/interfaces' +import type { Token, TokenInfo } from '#src/interfaces' import type TestSubject from '../token-factory' -import type TokenFields from '../token-fields' import type TokenType from '../token-type' describe('unit-d:types/TokenFactory', () => { describe('parameters', () => { - it('should be callable with [TokenType, TokenFields]', () => { + it('should be callable with [TokenType, TokenInfo]', () => { // Arrange - type P = [TokenType, TokenFields] + type P = [TokenType, TokenInfo] // Expect expectTypeOf().parameters.toEqualTypeOf
<P>
() diff --git a/src/types/__tests__/token-fields.spec-d.ts b/src/types/__tests__/token-fields.spec-d.ts deleted file mode 100644 index 1703478..0000000 --- a/src/types/__tests__/token-fields.spec-d.ts +++ /dev/null @@ -1,13 +0,0 @@ -/** - * @file Type Tests - TokenFields - * @module vfile-lexer/types/tests/unit-d/TokenFields - */ - -import type { Token } from '#src/interfaces' -import type TestSubject from '../token-fields' - -describe('unit-d:types/TokenFields', () => { - it('should equal Omit', () => { - expectTypeOf().toEqualTypeOf>() - }) -}) diff --git a/src/types/__tests__/tokenize-options.spec-d.ts b/src/types/__tests__/tokenize-options.spec-d.ts new file mode 100644 index 0000000..ec44bba --- /dev/null +++ b/src/types/__tests__/tokenize-options.spec-d.ts @@ -0,0 +1,17 @@ +/** + * @file Type Tests - TokenizeOptions + * @module vfile-lexer/types/tests/unit-d/TokenizeOptions + */ + +import type { Options, PreprocessOptions } from '#src/interfaces' +import type TestSubject from '../tokenize-options' + +describe('unit-d:types/TokenizeOptions', () => { + it('should match Options', () => { + expectTypeOf().toMatchTypeOf() + }) + + it('should match PreprocessOptions', () => { + expectTypeOf().toMatchTypeOf() + }) +}) diff --git a/src/types/__tests__/tokenizer.spec-d.ts b/src/types/__tests__/tokenizer.spec-d.ts index 308614d..8f8bc35 100644 --- a/src/types/__tests__/tokenizer.spec-d.ts +++ b/src/types/__tests__/tokenizer.spec-d.ts @@ -3,8 +3,7 @@ * @module vfile-lexer/types/tests/unit-d/Tokenizer */ -import type { TokenizeContext } from '#src/interfaces' -import type Effects from '../effects' +import type { Effects, TokenizeContext } from '#src/interfaces' import type State from '../state' import type TestSubject from '../tokenizer' diff --git a/src/types/__tests__/value.spec-d.ts b/src/types/__tests__/value.spec-d.ts new file mode 100644 index 0000000..b23785e --- /dev/null +++ b/src/types/__tests__/value.spec-d.ts @@ -0,0 +1,16 @@ +/** + * @file Type Tests - Value + * @module vfile-lexer/types/tests/unit-d/Value + */ + +import type TestSubject from '../value' + +describe('unit-d:types/Value', () => { + it('should extract Uint8Array', () => { + expectTypeOf().extract().not.toBeNever() + }) + + it('should extract string', () => { + expectTypeOf().extract().not.toBeNever() + }) +}) diff --git a/src/types/__tests__/write.spec-d.ts b/src/types/__tests__/write.spec-d.ts new file mode 100644 index 0000000..92c2b99 --- /dev/null +++ b/src/types/__tests__/write.spec-d.ts @@ -0,0 +1,22 @@ +/** + * @file Type Tests - Write + * @module vfile-lexer/types/tests/unit-d/Write + */ + +import type Chunk from '../chunk' +import type Event from '../event' +import type TestSubject from '../write' + +describe('unit-d:types/Write', () => { + describe('parameters', () => { + it('should be callable with [Chunk[]]', () => { + expectTypeOf().parameters.toEqualTypeOf<[Chunk[]]>() + }) + }) + + describe('returns', () => { + it('should return Event[]', () => { + expectTypeOf().returns.toEqualTypeOf() + }) + }) +}) diff --git a/src/types/chunk.ts b/src/types/chunk.ts new file mode 100644 index 0000000..86a1ca0 --- /dev/null +++ b/src/types/chunk.ts @@ -0,0 +1,15 @@ +/** + * @file Type Aliases - Chunk + * @module vfile-lexer/types/Chunk + */ + +import type Code from './code' + +/** + * A character code or slice of a buffer in the form of a string. 
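* * > 👉 e.g. a sketch: `'hello '` and `codes.eof` are both valid chunks; string chunks are preprocessed into character codes when written.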
+ * + * @see {@linkcode Code} + */ +type Chunk = Code | string + +export type { Chunk as default } diff --git a/src/types/code-check.ts new file mode 100644 index 0000000..c494a5d --- /dev/null +++ b/src/types/code-check.ts @@ -0,0 +1,18 @@ +/** + * @file Type Aliases - CodeCheck + * @module vfile-lexer/types/CodeCheck + */ + +import type Code from './code' + +/** + * Check whether a character code passes a test. + * + * @see {@linkcode Code} + * + * @param {Code} code - Character code to check + * @return {boolean} `true` if `code` passes test + */ +type CodeCheck = (code: Code) => boolean + +export type { CodeCheck as default } diff --git a/src/types/code.ts new file mode 100644 index 0000000..dbd670f --- /dev/null +++ b/src/types/code.ts @@ -0,0 +1,17 @@ +/** + * @file Type Aliases - Code + * @module vfile-lexer/types/Code + */ + +/** + * A character code, with `null` denoting end of stream (eof). + * + * This is often the same as what [`String#codePointAt`][codepointat] yields, + * but meaning is added to other values as well. Negative integers can be used + * to represent line endings and tabs. + * + * [codepointat]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String/codePointAt + */ +type Code = number | null + +export type { Code as default } diff --git a/src/types/construct-pack.ts new file mode 100644 index 0000000..393d5cd --- /dev/null +++ b/src/types/construct-pack.ts @@ -0,0 +1,15 @@ +/** + * @file Type Aliases - ConstructPack + * @module vfile-lexer/types/ConstructPack + */ + +import type { Construct } from '#src/interfaces' + +/** + * A single construct or list of constructs. + * + * @see {@linkcode Construct} + */ +type ConstructPack = Construct | Construct[] + +export type { ConstructPack as default } diff --git a/src/types/construct-record.ts deleted file mode 100644 index 9fd853f..0000000 --- a/src/types/construct-record.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * @file Type Aliases - ConstructRecord - * @module vfile-lexer/types/ConstructRecord - */ - -import type RecordConstructs from './constructs-record' - -/** - * Several constructs, mapped from their initial codes. - */ -type ConstructRecord = { - /** - * Try tokenizing constructs that start with the specified character code. - * - * > 👉 Does not run on end-of-file code (`null`). - * - * @see {@linkcode RecordConstructs} - */ - [code: `${number}` | number]: RecordConstructs | null | undefined - - /** - * Try tokenizing constructs that start with any character code. - * - * > 👉 Does not run on end-of-file code (`null`). - * - * @see {@linkcode RecordConstructs} - */ - null?: RecordConstructs | null | undefined -} - -export type { ConstructRecord as default } diff --git a/src/types/constructs-record.ts deleted file mode 100644 index 8ba1226..0000000 --- a/src/types/constructs-record.ts +++ /dev/null @@ -1,15 +0,0 @@ -/** - * @file Type Aliases - RecordConstructs - * @module vfile-lexer/types/RecordConstructs - */ - -import type { Construct } from '#src/interfaces' - -/** - * A single construct or list of constructs.
- * - * @see {@linkcode Construct} - */ -type RecordConstructs = Construct | Construct[] | readonly Construct[] - -export type { RecordConstructs as default } diff --git a/src/types/constructs.ts index eb49571..8908600 100644 --- a/src/types/constructs.ts +++ b/src/types/constructs.ts @@ -3,16 +3,16 @@ * @module vfile-lexer/types/Constructs */ -import type ConstructRecord from './construct-record' -import type RecordConstructs from './constructs-record' +import type { ConstructRecord } from '#src/interfaces' +import type ConstructPack from './construct-pack' /** * A single construct, list of constructs, or several constructs mapped from * their initial codes. * + * @see {@linkcode ConstructPack} * @see {@linkcode ConstructRecord} - * @see {@linkcode RecordConstructs} */ -type Constructs = ConstructRecord | RecordConstructs +type Constructs = ConstructRecord | ConstructPack export type { Constructs as default } diff --git a/src/types/consume.ts index 9ca5209..b255b8d 100644 --- a/src/types/consume.ts +++ b/src/types/consume.ts @@ -3,7 +3,7 @@ * @module vfile-lexer/types/Consume */ -import type { Code } from '@flex-development/vfile-reader' +import type Code from './code' /** * Deal with a character `code` and move onto the next. diff --git a/src/types/define-skip.ts new file mode 100644 index 0000000..c2f3111 --- /dev/null +++ b/src/types/define-skip.ts @@ -0,0 +1,25 @@ +/** + * @file Type Aliases - DefineSkip + * @module vfile-lexer/types/DefineSkip + */ + +import type { Point } from '#src/interfaces' + +/** + * Define a skip. + * + * As containers may "nibble" a prefix from margins, where a line starts after + * that prefix can be defined here. + * + * When the tokenizer consumes a line ending and moves onto `point.line`, it + * shifts past the prefix to `point.column`. + * + * @see {@linkcode Point} + * + * @param {Pick<Point, 'column' | 'line'>} point - Skip point + * @return {undefined} Nothing + */ +type DefineSkip = (point: Pick<Point, 'column' | 'line'>) => undefined + +export type { DefineSkip as default } diff --git a/src/types/encoding.ts new file mode 100644 index 0000000..65975a6 --- /dev/null +++ b/src/types/encoding.ts @@ -0,0 +1,23 @@ +/** + * @file Type Aliases - Encoding + * @module vfile-lexer/types/Encoding + */ + +/** + * Encodings supported by {@linkcode TextDecoder}. + * + * > 👉 Arbitrary encodings can be supported depending on how the engine is + * > built, so any string *could* be valid.
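* * > 👉 e.g. a sketch: `new TextDecoder('utf8')` decodes the same as `new TextDecoder('utf-8')`, while an unrecognized label makes the constructor throw a `RangeError`.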
+ * + * @see https://nodejs.org/api/util.html#whatwg-supported-encodings + */ +type Encoding = + | 'unicode-1-1-utf-8' // always supported in node + | 'utf-16be' // not supported when ICU is disabled + | 'utf-16le' // always supported in node + | 'utf-8' // always supported in node + | 'utf16' // always supported in node + | 'utf8' // always supported in node + | (string & {}) // everything else (depends on browser, or full ICU data) + +export type { Encoding as default } diff --git a/src/types/enter.ts b/src/types/enter.ts index 01cd1cc..b0c7886 100644 --- a/src/types/enter.ts +++ b/src/types/enter.ts @@ -3,8 +3,7 @@ * @module vfile-lexer/types/Enter */ -import type { Token } from '#src/interfaces' -import type TokenFields from './token-fields' +import type { Token, TokenFields } from '#src/interfaces' import type TokenType from './token-type' /** @@ -15,9 +14,9 @@ import type TokenType from './token-type' * @see {@linkcode Token} * * @param {TokenType} type - Token type - * @param {(Partial | null)?} fields - Token fields + * @param {TokenFields | null | undefined} [fields] - Token fields * @return {Token} Open token */ -type Enter = (type: TokenType, fields?: Partial | null) => Token +type Enter = (type: TokenType, fields?: TokenFields | null | undefined) => Token export type { Enter as default } diff --git a/src/types/event-type.ts b/src/types/event-type.ts index 9049cc1..d2adca1 100644 --- a/src/types/event-type.ts +++ b/src/types/event-type.ts @@ -3,13 +3,9 @@ * @module vfile-lexer/types/Event */ -import type { ev } from '#src/enums' - /** * Union of event types. - * - * @see {@linkcode ev} */ -type EventType = keyof typeof ev | ev +type EventType = 'enter' | 'exit' export type { EventType as default } diff --git a/src/types/file-like.ts b/src/types/file-like.ts new file mode 100644 index 0000000..afd35bc --- /dev/null +++ b/src/types/file-like.ts @@ -0,0 +1,20 @@ +/** + * @file Type Aliases - FileLike + * @module vfile-lexer/types/FileLike + */ + +import type Value from './value' + +/** + * A file-like structure. + */ +type FileLike = { + /** + * Contents of file. + * + * @see {@linkcode Value} + */ + value: Value +} + +export type { FileLike as default } diff --git a/src/types/finalize-context.ts b/src/types/finalize-context.ts index adb9744..a4d0e3e 100644 --- a/src/types/finalize-context.ts +++ b/src/types/finalize-context.ts @@ -4,17 +4,21 @@ */ import type { TokenizeContext } from '#src/interfaces' +import type Lexer from '#src/lexer' /** * Finalize the tokenization context. * * @see {@linkcode TokenizeContext} * - * @param {TokenizeContext} context - Base context - * @return {TokenizeContext | null | undefined | void} Final context + * @this {Lexer} + * + * @param {TokenizeContext} base - Base context + * @return {TokenizeContext | null | undefined} Final context */ type FinalizeContext = ( + this: Lexer, base: TokenizeContext -) => TokenizeContext | null | undefined | void +) => TokenizeContext | null | undefined export type { FinalizeContext as default } diff --git a/src/types/guard.ts b/src/types/guard.ts index 7ead90b..0702a81 100644 --- a/src/types/guard.ts +++ b/src/types/guard.ts @@ -4,7 +4,7 @@ */ import type { TokenizeContext } from '#src/interfaces' -import type { Code } from '@flex-development/vfile-reader' +import type Code from './code' /** * Check the given character `code`. 
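* * > 👉 e.g. a sketch (usage hypothetical): a construct might use `code => code === codes.eof || isLineEnding(code)` as its `previous` guard so it only starts at the beginning of a line.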
diff --git a/src/types/index.ts b/src/types/index.ts index 21dec22..2f64a2c 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -3,35 +3,39 @@ * @module vfile-lexer/types */ -export type { - Code, - CodeCheck, - Offset, - Range, - RangeTuple, - ReaderSlice -} from '@flex-development/vfile-reader' +export type { Column, Line, Offset } from '@flex-development/unist-util-types' +export type { Indices, SerializedPoint } from '@flex-development/vfile-location' export type { default as Attempt, default as Check, default as Interrupt } from './attempt' -export type { default as ConstructRecord } from './construct-record' +export type { default as Chunk } from './chunk' +export type { default as Code } from './code' +export type { default as CodeCheck } from './code-check' +export type { default as ConstructPack } from './construct-pack' export type { default as Constructs } from './constructs' -export type { default as RecordConstructs } from './constructs-record' export type { default as Consume } from './consume' -export type { default as Effects } from './effects' +export type { default as DefineSkip } from './define-skip' +export type { default as Encoding } from './encoding' export type { default as Enter } from './enter' export type { default as Event } from './event' export type { default as EventType } from './event-type' export type { default as Exit } from './exit' +export type { default as FileLike } from './file-like' export type { default as FinalizeContext } from './finalize-context' export type { default as Guard } from './guard' export type { default as Initializer } from './initializer' +export type { default as Now } from './now' +export type { default as Preprocessor } from './preprocessor' export type { default as Resolver } from './resolver' export type { default as ReturnHandle } from './return-handle' +export type { default as SliceSerialize } from './slice-serialize' +export type { default as SliceStream } from './slice-stream' export type { default as State } from './state' export type { default as TokenFactory } from './token-factory' -export type { default as TokenFields } from './token-fields' export type { default as TokenType } from './token-type' +export type { default as TokenizeOptions } from './tokenize-options' export type { default as Tokenizer } from './tokenizer' +export type { default as Value } from './value' +export type { default as Write } from './write' diff --git a/src/types/initializer.ts b/src/types/initializer.ts index f667026..5905594 100644 --- a/src/types/initializer.ts +++ b/src/types/initializer.ts @@ -3,8 +3,7 @@ * @module vfile-lexer/types/Initializer */ -import type { TokenizeContext } from '#src/interfaces' -import type Effects from './effects' +import type { Effects, TokenizeContext } from '#src/interfaces' import type State from './state' import type Tokenizer from './tokenizer' diff --git a/src/types/now.ts b/src/types/now.ts new file mode 100644 index 0000000..dcc3863 --- /dev/null +++ b/src/types/now.ts @@ -0,0 +1,17 @@ +/** + * @file Type Aliases - Now + * @module vfile-lexer/types/Now + */ + +import type { Place } from '#src/interfaces' + +/** + * Get the current point in the file. 
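* * > 👉 e.g. a sketch: at the start of a file, `now()` returns `{ line: 1, column: 1, offset: 0, _index: 0 }`.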
+ * + * @see {@linkcode Place} + * + * @return {Place} Current place in file + */ +type Now = () => Place + +export type { Now as default } diff --git a/src/types/preprocessor.ts b/src/types/preprocessor.ts new file mode 100644 index 0000000..67b009c --- /dev/null +++ b/src/types/preprocessor.ts @@ -0,0 +1,31 @@ +/** + * @file Type Aliases - Preprocessor + * @module vfile-lexer/types/Preprocessor + */ + +import type Code from './code' +import type Encoding from './encoding' +import type FileLike from './file-like' +import type Value from './value' + +/** + * Turn `value` into character code chunks. + * + * @see {@linkcode Code} + * @see {@linkcode Encoding} + * @see {@linkcode FileLike} + * @see {@linkcode Value} + * + * @param {FileLike | Value | null | undefined} value - Value to preprocess + * @param {Encoding | null | undefined} [encoding] - Character encoding to use + * when value or its contents is {@linkcode Uint8Array} + * @param {boolean | null | undefined} [end] - End of stream? + * @return {Code[]} Character code chunks + */ +type Preprocessor = ( + value: FileLike | Value | null | undefined, + encoding?: Encoding | null | undefined, + end?: boolean | null | undefined +) => Code[] + +export type { Preprocessor as default } diff --git a/src/types/slice-serialize.ts b/src/types/slice-serialize.ts new file mode 100644 index 0000000..03d18c5 --- /dev/null +++ b/src/types/slice-serialize.ts @@ -0,0 +1,22 @@ +/** + * @file Type Aliases - SliceSerialize + * @module vfile-lexer/types/SliceSerialize + */ + +import type { Position } from '#src/interfaces' + +/** + * Get the text spanning `range` without changing the position of the reader. + * + * @see {@linkcode Position} + * + * @param {Position} range - Slice position + * @param {boolean | null | undefined} [expandTabs] - Expand tabs? + * @return {string} Serialized slice + */ +type SliceSerialize = ( + range: Position, + expandTabs?: boolean | null | undefined +) => string + +export type { SliceSerialize as default } diff --git a/src/types/slice-stream.ts b/src/types/slice-stream.ts new file mode 100644 index 0000000..70ab600 --- /dev/null +++ b/src/types/slice-stream.ts @@ -0,0 +1,20 @@ +/** + * @file Type Aliases - SliceStream + * @module vfile-lexer/types/SliceStream + */ + +import type { Position } from '#src/interfaces' +import type Code from './code' + +/** + * Get the chunks spanning `range`. + * + * @see {@linkcode Code} + * @see {@linkcode Token} + * + * @param {Position} range - Position in stream + * @return {Code[]} List of chunks + */ +type SliceStream = (range: Position) => Code[] + +export type { SliceStream as default } diff --git a/src/types/state.ts b/src/types/state.ts index 4c0386e..1c88601 100644 --- a/src/types/state.ts +++ b/src/types/state.ts @@ -3,7 +3,7 @@ * @module vfile-lexer/types/State */ -import type { Code } from '@flex-development/vfile-reader' +import type Code from './code' /** * The main unit in the state machine: a function that gets a character code and diff --git a/src/types/token-factory.ts b/src/types/token-factory.ts index 4e5e65e..1d7db08 100644 --- a/src/types/token-factory.ts +++ b/src/types/token-factory.ts @@ -3,21 +3,20 @@ * @module vfile-lexer/types/TokenFactory */ -import type { Token } from '#src/interfaces' -import type TokenFields from './token-fields' +import type { Token, TokenInfo } from '#src/interfaces' import type TokenType from './token-type' /** * Create a new token. 
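* * > 👉 e.g. a sketch mirroring the default factory (`u` from `unist-builder` assumed): `(type, info) => u(type, info)`.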
 *
- * @see {@linkcode TokenFields}
+ * @see {@linkcode TokenInfo}
  * @see {@linkcode TokenType}
  * @see {@linkcode Token}
  *
  * @param {TokenType} type - Token type
- * @param {TokenFields} fields - Token fields
+ * @param {TokenInfo} info - Token info
  * @return {Token} New token
  */
-type TokenFactory = (type: TokenType, fields: TokenFields) => Token
+type TokenFactory = (type: TokenType, info: TokenInfo) => Token
 
 export type { TokenFactory as default }
diff --git a/src/types/token-fields.ts b/src/types/token-fields.ts
deleted file mode 100644
index 92216b4..0000000
--- a/src/types/token-fields.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/**
- * @file Type Aliases - TokenFields
- * @module vfile-lexer/types/TokenFields
- */
-
-import type { Token } from '#src/interfaces'
-
-/**
- * Additional token fields.
- *
- * @see {@linkcode Token}
- */
-type TokenFields = Omit<Token, 'type'>
-
-export type { TokenFields as default }
diff --git a/src/types/token-type.ts b/src/types/token-type.ts
index 5697bf9..e459e43 100644
--- a/src/types/token-type.ts
+++ b/src/types/token-type.ts
@@ -11,6 +11,6 @@ import type { TokenTypeMap } from '#src/interfaces'
  * To register custom token types, augment {@linkcode TokenTypeMap}. They will
  * be added to this union automatically.
  */
-type TokenType = keyof TokenTypeMap
+type TokenType = Extract<keyof TokenTypeMap, string>
 
 export type { TokenType as default }
diff --git a/src/types/tokenize-options.ts b/src/types/tokenize-options.ts
new file mode 100644
index 0000000..630eca5
--- /dev/null
+++ b/src/types/tokenize-options.ts
@@ -0,0 +1,16 @@
+/**
+ * @file Type Aliases - TokenizeOptions
+ * @module vfile-lexer/types/TokenizeOptions
+ */
+
+import type { Options, PreprocessOptions } from '#src/interfaces'
+
+/**
+ * Tokenize options.
+ *
+ * @see {@linkcode Options}
+ * @see {@linkcode PreprocessOptions}
+ */
+type TokenizeOptions = Options & PreprocessOptions
+
+export type { TokenizeOptions as default }
diff --git a/src/types/tokenizer.ts b/src/types/tokenizer.ts
index f9b2e0b..089dd9f 100644
--- a/src/types/tokenizer.ts
+++ b/src/types/tokenizer.ts
@@ -3,8 +3,7 @@
  * @module vfile-lexer/types/Tokenizer
  */
 
-import type { TokenizeContext } from '#src/interfaces'
-import type Effects from './effects'
+import type { Effects, TokenizeContext } from '#src/interfaces'
 import type State from './state'
 
 /**
diff --git a/src/types/value.ts b/src/types/value.ts
new file mode 100644
index 0000000..1aaddfd
--- /dev/null
+++ b/src/types/value.ts
@@ -0,0 +1,13 @@
+/**
+ * @file Type Aliases - Value
+ * @module vfile-lexer/types/Value
+ */
+
+/**
+ * Contents of a file.
+ *
+ * Can either be text, or a {@linkcode Uint8Array} like structure.
+ */
+type Value = Uint8Array | string
+
+export type { Value as default }
diff --git a/src/types/write.ts b/src/types/write.ts
new file mode 100644
index 0000000..f4c10f5
--- /dev/null
+++ b/src/types/write.ts
@@ -0,0 +1,22 @@
+/**
+ * @file Type Aliases - Write
+ * @module vfile-lexer/types/Write
+ */
+
+import type Chunk from './chunk'
+import type Event from './event'
+
+/**
+ * Write a slice of chunks.
+ *
+ * The eof code (`null`) can be used to signal end of stream.
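+ *
+ * @example
+ *  // a sketch, assuming `write` is exposed by the lexer and `chunks` were
+ *  // produced by a preprocessor; `codes.eof` (`null`) ends the stream
+ *  const events = lexer.write([...chunks, codes.eof])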
+ * + * @see {@linkcode Chunk} + * @see {@linkcode Event} + * + * @param {Chunk[]} slice - Chunks + * @return {Event[]} List of events + */ +type Write = (slice: Chunk[]) => Event[] + +export type { Write as default } diff --git a/src/utils/__tests__/is-line-ending.spec.ts b/src/utils/__tests__/is-line-ending.spec.ts new file mode 100644 index 0000000..4ee8c19 --- /dev/null +++ b/src/utils/__tests__/is-line-ending.spec.ts @@ -0,0 +1,25 @@ +/** + * @file Unit Tests - isLineEnding + * @module vfile-lexer/utils/tests/unit/isLineEnding + */ + +import { codes } from '#src/enums' +import testSubject from '../is-line-ending' + +describe('unit:utils/isLineEnding', () => { + it('should return false if `code` is not line ending', () => { + expect(testSubject(codes.eof)).to.be.false + }) + + it.each([ + 'cr', + 'crlf', + 'lf', + 'ls', + 'ps', + 'vcr', + 'vlf' + ])('should return true if `code` is line ending (codes.%s)', key => { + expect(testSubject(codes[key])).to.be.true + }) +}) diff --git a/src/utils/__tests__/resolve-all.functional.spec.ts b/src/utils/__tests__/resolve-all.functional.spec.ts index 9ad6993..06e2b3c 100644 --- a/src/utils/__tests__/resolve-all.functional.spec.ts +++ b/src/utils/__tests__/resolve-all.functional.spec.ts @@ -3,29 +3,29 @@ * @module vfile-lexer/utils/tests/functional/resolveAll */ -import { ev, tt } from '#src/enums' -import type { Construct, Token, TokenizeContext } from '#src/interfaces' +import tt from '#fixtures/tt' +import { ev } from '#src/enums' +import type { Construct, Place, Token, TokenizeContext } from '#src/interfaces' import type { Event, Resolver } from '#src/types' -import type { MockInstance } from '#tests/interfaces' -import type { Point } from '@flex-development/vfile-reader' +import type { MockInstance } from 'vitest' import testSubject from '../resolve-all' describe('functional:utils/resolveAll', () => { let constructs: { resolveAll: MockInstance }[] let context: TokenizeContext let events: Event[] - let point: Point + let point: Place let resolveAll: MockInstance let token: Token beforeEach(() => { - point = { column: 1, line: 1, offset: 0 } - token = { end: point, start: point, type: tt.sof } + point = { _index: 0, column: 1, line: 1, offset: 0 } + token = { end: point, start: point, type: tt.eof } context = {} as unknown as TokenizeContext events = [[ev.enter, token, context], [ev.exit, token, context]] - resolveAll = vi.fn<[Event[], TokenizeContext], Event[]>(() => events) + resolveAll = vi.fn(() => events) constructs = [{ resolveAll }, { resolveAll }] }) diff --git a/src/utils/__tests__/resolve-slice.functional.spec.ts b/src/utils/__tests__/resolve-slice.functional.spec.ts index 20edecc..d77f1be 100644 --- a/src/utils/__tests__/resolve-slice.functional.spec.ts +++ b/src/utils/__tests__/resolve-slice.functional.spec.ts @@ -3,30 +3,29 @@ * @module vfile-lexer/utils/tests/functional/resolveSlice */ -import tk from '#fixtures/tk' +import tt from '#fixtures/tt' import { ev } from '#src/enums' import type { Token, TokenizeContext } from '#src/interfaces' -import type { Event } from '#src/types' -import type { MockInstance } from '#tests/interfaces' -import type { Range } from '@flex-development/vfile-reader' +import type { Event, SliceSerialize } from '#src/types' +import type { MockInstance } from 'vitest' import testSubject from '../resolve-slice' describe('functional:utils/resolveSlice', () => { let context: TokenizeContext let events: Event[] - let sliceSerialize: MockInstance + let sliceSerialize: MockInstance let token: Token let value: 
string

   beforeEach(() => {
     token = {
-      end: { column: 1, line: 2, offset: 17 },
-      start: { column: 17, line: 1, offset: 16 },
-      type: tk.whitespace
+      end: { _index: 16, column: 17, line: 1, offset: 16 },
+      start: { _index: 0, column: 1, line: 1, offset: 0 },
+      type: tt.typeMetadata
     }
-    value = '\n'
-    sliceSerialize = vi.fn<[range: Range], string>(() => value)
+    value = '{{ id: string }}'
+    sliceSerialize = vi.fn<SliceSerialize>(() => value)
     context = { sliceSerialize } as unknown as TokenizeContext
     events = [[ev.enter, token, context], [ev.exit, token, context]]
diff --git a/src/utils/index.ts b/src/utils/index.ts
index 92f1d24..25e363c 100644
--- a/src/utils/index.ts
+++ b/src/utils/index.ts
@@ -3,6 +3,7 @@
  * @module vfile-lexer/utils
  */
 
-export { chars, codes } from '@flex-development/vfile-reader'
+export { default as isLineEnding } from './is-line-ending'
 export { default as resolveAll } from './resolve-all'
 export { default as resolveSlice } from './resolve-slice'
+export { default as resolveTokenList } from './resolve-token-list'
diff --git a/src/utils/is-line-ending.ts b/src/utils/is-line-ending.ts
new file mode 100644
index 0000000..e7e6422
--- /dev/null
+++ b/src/utils/is-line-ending.ts
@@ -0,0 +1,32 @@
+/**
+ * @file Utilities - isLineEnding
+ * @module vfile-lexer/utils/isLineEnding
+ */
+
+import { codes } from '#src/enums'
+import type { Code } from '#src/types'
+
+/**
+ * Check if the given character `code` represents a line ending.
+ *
+ * @see {@linkcode Code}
+ *
+ * @param {Code} code - Character code to check
+ * @return {code is NonNullable<Code>} `true` if `code` is line ending
+ */
+function isLineEnding(code: Code): code is NonNullable<Code> {
+  switch (code) {
+    case codes.cr:
+    case codes.crlf:
+    case codes.lf:
+    case codes.ls:
+    case codes.ps:
+    case codes.vcr:
+    case codes.vlf:
+      return true
+    default:
+      return false
+  }
+}
+
+export default isLineEnding
diff --git a/src/utils/resolve-all.ts b/src/utils/resolve-all.ts
index 7556b0d..1f9dccc 100644
--- a/src/utils/resolve-all.ts
+++ b/src/utils/resolve-all.ts
@@ -13,14 +13,14 @@ import type { Event, Resolver } from '#src/types'
  * @see {@linkcode Event}
  * @see {@linkcode TokenizeContext}
  *
- * @param {ReadonlyArray<Partial<Construct>>} constructs - List of constructs
- * @param {ReadonlyArray<Event>} events - List of events
+ * @param {Partial<Construct>[]} constructs - List of constructs
+ * @param {Event[]} events - List of events
  * @param {TokenizeContext} context - Tokenize context
  * @return {Event[]} Changed events
  */
 function resolveAll(
-  constructs: readonly Partial<Construct>[],
-  events: readonly Event[],
+  constructs: Partial<Construct>[],
+  events: Event[],
   context: TokenizeContext
 ): Event[] {
   /**
@@ -46,12 +46,12 @@
     const resolve: Resolver | null | undefined = constructs[i]!.resolveAll
 
     if (resolve && !called.includes(resolve)) {
-      events = resolve([...events], context)
+      events = resolve(events, context)
       called.push(resolve)
     }
   }
 
-  return [...events]
+  return events
 }
 
 export default resolveAll
diff --git a/src/utils/resolve-slice.ts b/src/utils/resolve-slice.ts
index f682eb2..e472eaa 100644
--- a/src/utils/resolve-slice.ts
+++ b/src/utils/resolve-slice.ts
@@ -18,13 +18,13 @@
  * @see {@linkcode Event}
  * @see {@linkcode TokenizeContext}
  *
- * @param {ReadonlyArray<Event>} events - List of events
+ * @param {Event[]} events - List of events
  * @param {Partial<TokenizeContext>} context - Tokenize context
  * @param {(string | null | undefined)?} [field] - Token field
  * @return {Event[]} Changed events
  */
 function resolveSlice(
-  events: readonly Event[],
+  events: Event[],
   context: Partial<TokenizeContext>,
   field?: string | null | undefined
 ): Event[] {
@@ -47,7 +47,7 @@
     }
   }
 
-  return [...events]
+  return events
 }
 
 export default resolveSlice
diff --git a/src/utils/resolve-token-list.ts b/src/utils/resolve-token-list.ts
new file mode 100644
index 0000000..3982b14
--- /dev/null
+++ b/src/utils/resolve-token-list.ts
@@ -0,0 +1,55 @@
+/**
+ * @file Utilities - resolveTokenList
+ * @module vfile-lexer/utils/resolveTokenList
+ */
+
+import { ev } from '#src/enums'
+import type { Token, TokenizeContext } from '#src/interfaces'
+import type { Event } from '#src/types'
+import { ok as assert } from 'devlop'
+
+/**
+ * Resolve a linked token list.
+ *
+ * @see {@linkcode Event}
+ * @see {@linkcode TokenizeContext}
+ *
+ * @template {Event[] | ReadonlyArray<Event>} T - List of events
+ *
+ * @param {T} events - List of events
+ * @return {T} Changed events
+ */
+function resolveTokenList<T extends Event[] | ReadonlyArray<Event>>(events: T): T {
+  if (events.length) {
+    /**
+     * Head token.
+     *
+     * @var {Token | undefined} head
+     */
+    let head: Token | undefined
+
+    /**
+     * Tail token.
+     *
+     * @var {Token | undefined} tail
+     */
+    let tail: Token | undefined
+
+    for (const [event, token] of events) {
+      if (event === ev.enter) {
+        if (head) {
+          assert(tail, 'expected tail token')
+          token.previous = tail
+          tail.next = token
+          tail = tail.next
+        } else {
+          head = tail = token
+        }
+      }
+    }
+  }
+
+  return events
+}
+
+export default resolveTokenList
diff --git a/typings/@flex-development/vfile-lexer/index.d.mts b/typings/@flex-development/vfile-lexer/index.d.mts
index ad539e9..7e69131 100644
--- a/typings/@flex-development/vfile-lexer/index.d.mts
+++ b/typings/@flex-development/vfile-lexer/index.d.mts
@@ -1,14 +1,13 @@
-import type tk from '#fixtures/tk'
+import type tt from '#fixtures/tt'
 import type {} from '#src/interfaces'
 
 declare module '#src/interfaces' {
+  interface TokenFields {
+    value?: string | null | undefined
+  }
+
   interface TokenTypeMap {
-    bigint: tk.bigint
-    inlineTag: tk.inlineTag
-    number: tk.number
-    punctuator: tk.punctuator
-    string: tk.string
-    tag: tk.tag
-    whitespace: tk.whitespace
+    eof: tt.eof
+    typeMetadata: tt.typeMetadata
   }
 }
diff --git a/yarn.lock b/yarn.lock
index 16a28ce..3b73000 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1253,10 +1253,10 @@
   languageName: node
   linkType: hard
 
-"@faker-js/faker@npm:9.0.0-alpha.0":
-  version: 9.0.0-alpha.0
-  resolution: "@faker-js/faker@npm:9.0.0-alpha.0"
-  checksum: 10/e9bb4b32ec170be4bb484826d221de6bfd3a515f57bb9d9f7f6e24816187399dd17d1527a8b4d0f02f891c1fac6d8751f0ab0d0d8d6a9854309ab3102e1267cd
+"@faker-js/faker@npm:9.0.0-rc.0":
+  version: 9.0.0-rc.0
+  resolution: "@faker-js/faker@npm:9.0.0-rc.0"
+  checksum: 10/6272e69c34e793b9e897b269e29e3aadd0d90850b6c4c9fcba6ec1fb9b4a1b32e40092ee77a4ddbe3dc36ea84148cad6d040a7f19e76fdf507b54a903aa5675e
   languageName: node
   linkType: hard
 
@@ -1680,23 +1680,23 @@
   languageName: node
   linkType: hard
 
-"@flex-development/unist-util-inspect@npm:1.0.0":
-  version: 1.0.0
-  resolution: "@flex-development/unist-util-inspect@npm:1.0.0::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Funist-util-inspect%2F1.0.0%2F2585616dd8111de66e7ddc2d846a5e077c57c982"
+"@flex-development/unist-util-inspect@npm:1.0.1":
+  version: 1.0.1
+  resolution: "@flex-development/unist-util-inspect@npm:1.0.1::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Funist-util-inspect%2F1.0.1%2F62e4316a7ec1359ea377db40bd063dc83808b75e"
   dependencies:
-
"@flex-development/unist-util-stringify-position": "npm:1.0.0" + "@flex-development/unist-util-stringify-position": "npm:1.0.1" "@types/unist": "npm:3.0.2" - checksum: 10/c1b81c3191e2be2bbbcff95f1a2e99afe4360cd72eebbe0b5401499fd9c89c9fa2e42adc7f875fbc7a6204bb75d33a2ad7f0b53a19bca4547182d784143a21d8 + checksum: 10/9d4da80d5be62ead6afff50c07a2587c5bf5f51bb3432b38f74863a6b55d0e4c9e854b4f73f1a98746b2c9409e5cc76f40cfe85ba0c7eb634ba825f39eb536dc languageName: node linkType: hard -"@flex-development/unist-util-stringify-position@npm:1.0.0": - version: 1.0.0 - resolution: "@flex-development/unist-util-stringify-position@npm:1.0.0::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Funist-util-stringify-position%2F1.0.0%2Fb2a5cbc97f53cee6e215f15a908a78109e543b48" +"@flex-development/unist-util-stringify-position@npm:1.0.1": + version: 1.0.1 + resolution: "@flex-development/unist-util-stringify-position@npm:1.0.1::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Funist-util-stringify-position%2F1.0.1%2F573c6a592094d914733e4491a7a68de42c196c8f" dependencies: "@flex-development/unist-util-types": "npm:1.6.1" "@types/unist": "npm:3.0.2" - checksum: 10/085cfe2ce2398c1aa0f986ea54789a1cb1df69a348af7f2e013a443fd5672c76c8d2764cacefa0babe9545ed63a623876e0eb2c989a9516dad55e216d5bb9a53 + checksum: 10/86410dc2984ecfb60f2db63d114b1929b056acf09e38dc822aea6b6dbcf92ebfd6e52f59d2392e10d9b60579e65fd21c740061943ffbb56e82d896d78c9ed5a1 languageName: node linkType: hard @@ -1717,7 +1717,7 @@ __metadata: "@commitlint/cli": "npm:19.3.0" "@commitlint/types": "npm:19.0.3" "@eslint/js": "npm:9.5.0" - "@faker-js/faker": "npm:9.0.0-alpha.0" + "@faker-js/faker": "npm:9.0.0-rc.0" "@flex-development/commitlint-config": "npm:1.0.1" "@flex-development/decorator-regex": "npm:2.0.0" "@flex-development/esm-types": "npm:2.0.0" @@ -1727,21 +1727,21 @@ __metadata: "@flex-development/pathe": "npm:2.0.0" "@flex-development/tutils": "npm:6.0.0-alpha.25" "@flex-development/unist-util-builder": "npm:2.0.0" - "@flex-development/unist-util-inspect": "npm:1.0.0" + "@flex-development/unist-util-inspect": "npm:1.0.1" "@flex-development/unist-util-types": "npm:1.6.1" - "@flex-development/vfile-reader": "npm:3.1.2" + "@flex-development/vfile-location": "npm:1.1.0" "@stylistic/eslint-plugin": "npm:2.2.2" "@types/chai": "npm:4.3.16" "@types/debug": "npm:4.1.12" "@types/eslint": "npm:8.56.10" "@types/eslint__js": "npm:8.42.3" "@types/is-ci": "npm:3.0.4" - "@types/node": "npm:20.14.6" + "@types/node": "npm:20.14.11" "@types/node-notifier": "npm:8.0.5" "@types/unist": "npm:3.0.2" "@vates/toggle-scripts": "npm:1.0.0" - "@vitest/coverage-v8": "npm:2.0.0-beta.11" - "@vitest/ui": "npm:2.0.0-beta.11" + "@vitest/coverage-v8": "npm:2.0.4" + "@vitest/ui": "npm:2.0.4" chai: "npm:5.1.1" convert-hrtime: "npm:5.0.0" cross-env: "npm:7.0.3" @@ -1771,6 +1771,7 @@ __metadata: is-ci: "npm:3.0.1" jsonc-eslint-parser: "npm:2.4.0" lint-staged: "npm:15.2.7" + micromark-core-commonmark: "npm:2.0.1" micromark-util-character: "npm:2.1.0" micromark-util-chunked: "npm:2.0.0" node-notifier: "npm:10.0.1" @@ -1785,32 +1786,21 @@ __metadata: ts-dedent: "npm:2.2.0" typescript: "npm:5.5.2" typescript-eslint: "npm:8.0.0-alpha.30" - vfile: "npm:6.0.1" + vfile: "npm:6.0.2" vite-tsconfig-paths: "npm:4.3.2" - vitest: "npm:2.0.0-beta.11" + vitest: "npm:2.0.4" yaml-eslint-parser: "npm:1.2.3" languageName: unknown linkType: soft -"@flex-development/vfile-location@npm:1.0.2": - version: 1.0.2 - resolution: 
"@flex-development/vfile-location@npm:1.0.2::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Fvfile-location%2F1.0.2%2Fb341b4a1817c124f2e2592815d00f6cbe408413c" +"@flex-development/vfile-location@npm:1.1.0": + version: 1.1.0 + resolution: "@flex-development/vfile-location@npm:1.1.0::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Fvfile-location%2F1.1.0%2F3cb3d9ff81e6a56c0e5e50048701f90db0eb80e0" dependencies: "@flex-development/unist-util-types": "npm:1.6.1" "@types/unist": "npm:3.0.2" - vfile: "npm:6.0.1" - checksum: 10/9dd588ee62c045f9d170afdcf6a9e62bb05511fcdbe51bf820d76b4f756086c4f1fe1b754b8b453874aadbbfa85672ee008bb7d7974b3e49c6bf57210cdd1a79 - languageName: node - linkType: hard - -"@flex-development/vfile-reader@npm:3.1.2": - version: 3.1.2 - resolution: "@flex-development/vfile-reader@npm:3.1.2::__archiveUrl=https%3A%2F%2Fnpm.pkg.github.com%2Fdownload%2F%40flex-development%2Fvfile-reader%2F3.1.2%2Fda7a996737f030cf015064ad94c17dd875b2465e" - dependencies: - "@flex-development/unist-util-types": "npm:1.6.1" - "@flex-development/vfile-location": "npm:1.0.2" - vfile: "npm:6.0.1" - checksum: 10/9480de3863a4b088a4de34fc804078aae2d00692a352e588dd2a412380202f47290f46ba61ee14a47009e005680deddf2a3b1c5ab071341b0a8ec06d133cc4ba + vfile: "npm:6.0.2" + checksum: 10/3f217ede58e7f8855b412e7f25f78983a4c5e4256c1c3ab330efc7f8dbe0e30ca6983944950c1ac3b74081afba2d23688efef264c9e11bae796593ec85921c52 languageName: node linkType: hard @@ -1856,15 +1846,6 @@ __metadata: languageName: node linkType: hard -"@jest/schemas@npm:^29.6.3": - version: 29.6.3 - resolution: "@jest/schemas@npm:29.6.3" - dependencies: - "@sinclair/typebox": "npm:^0.27.8" - checksum: 10/910040425f0fc93cd13e68c750b7885590b8839066dfa0cd78e7def07bbb708ad869381f725945d66f2284de5663bbecf63e8fdd856e2ae6e261ba30b1687e93 - languageName: node - linkType: hard - "@jridgewell/gen-mapping@npm:^0.3.5": version: 0.3.5 resolution: "@jridgewell/gen-mapping@npm:0.3.5" @@ -2171,13 +2152,6 @@ __metadata: languageName: node linkType: hard -"@sinclair/typebox@npm:^0.27.8": - version: 0.27.8 - resolution: "@sinclair/typebox@npm:0.27.8" - checksum: 10/297f95ff77c82c54de8c9907f186076e715ff2621c5222ba50b8d40a170661c0c5242c763cba2a4791f0f91cb1d8ffa53ea1d7294570cf8cd4694c0e383e484d - languageName: node - linkType: hard - "@sindresorhus/chunkify@npm:^0.2.0": version: 0.2.0 resolution: "@sindresorhus/chunkify@npm:0.2.0" @@ -2480,12 +2454,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:20.14.6, @types/node@npm:^20.0.0": - version: 20.14.6 - resolution: "@types/node@npm:20.14.6" +"@types/node@npm:*, @types/node@npm:20.14.11, @types/node@npm:^20.0.0": + version: 20.14.11 + resolution: "@types/node@npm:20.14.11" dependencies: undici-types: "npm:~5.26.4" - checksum: 10/1dcfeeb03ce3c3a1d8a537fefee7cd0cffb78f89e9535b74ee12940559566b57c39dad20d1b165b60b5727408dd44e1a52e5c01cf02d0a99d93ef3da8062c86e + checksum: 10/344e1ce1ed16c86ed1c4209ab4d1de67db83dd6b694a6fabe295c47144dde2c58dabddae9f39a0a2bdd246e95f8d141ccfe848e464884b48b8918df4f7788025 languageName: node linkType: hard @@ -2732,97 +2706,105 @@ __metadata: languageName: node linkType: hard -"@vitest/coverage-v8@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/coverage-v8@npm:2.0.0-beta.11" +"@vitest/coverage-v8@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/coverage-v8@npm:2.0.4" dependencies: "@ampproject/remapping": "npm:^2.3.0" "@bcoe/v8-coverage": "npm:^0.2.3" debug: "npm:^4.3.5" 
istanbul-lib-coverage: "npm:^3.2.2" istanbul-lib-report: "npm:^3.0.1" - istanbul-lib-source-maps: "npm:^5.0.4" + istanbul-lib-source-maps: "npm:^5.0.6" istanbul-reports: "npm:^3.1.7" magic-string: "npm:^0.30.10" magicast: "npm:^0.3.4" - picocolors: "npm:^1.0.1" std-env: "npm:^3.7.0" - strip-literal: "npm:^2.1.0" test-exclude: "npm:^7.0.1" + tinyrainbow: "npm:^1.2.0" peerDependencies: - vitest: 2.0.0-beta.11 - checksum: 10/f7aa66cce570d81330382249ac6b55ecc30b91daf211a1972df54298a0568ddcd000720e192a0b6add2f557cc95e9779cb7e5bb5cdf514537056cd985419ac34 + vitest: 2.0.4 + checksum: 10/de23ca9c8e7cd704d889475af9a8282a1d29e4ca05909edc28df3f55b65a10cba344ab350ab255d0ad1b8a3dc6d98ebb12d5c2614bc8d92b03b645f15912ae61 languageName: node linkType: hard -"@vitest/expect@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/expect@npm:2.0.0-beta.11" +"@vitest/expect@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/expect@npm:2.0.4" dependencies: - "@vitest/spy": "npm:2.0.0-beta.11" - "@vitest/utils": "npm:2.0.0-beta.11" + "@vitest/spy": "npm:2.0.4" + "@vitest/utils": "npm:2.0.4" chai: "npm:^5.1.1" - checksum: 10/7d164183b26c846391df9bec35072a79e803fd15da1aadaca02656affefa919aaba69c3e43988dc47ee13dc8c1e81d07d22a5b34bcab2a531c6ab1eaf8e489f8 + tinyrainbow: "npm:^1.2.0" + checksum: 10/9e77266306a9ee6c982956e79e5086edeaec9f387fb9f8840d749ba9e026b27c01f68987a732b53746cd7fb0fce4a2620dbd0359ca3efe891a8ba89300568111 + languageName: node + linkType: hard + +"@vitest/pretty-format@npm:2.0.4, @vitest/pretty-format@npm:^2.0.4": + version: 2.0.4 + resolution: "@vitest/pretty-format@npm:2.0.4" + dependencies: + tinyrainbow: "npm:^1.2.0" + checksum: 10/16223d1c9f8c86cea7a064cf625380e90b20a5c2f95fda6ab3643c16cce1925afa337109ee12dcbf54834a161fd2b68be16179da9fd9fb948de986c33942203b languageName: node linkType: hard -"@vitest/runner@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/runner@npm:2.0.0-beta.11" +"@vitest/runner@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/runner@npm:2.0.4" dependencies: - "@vitest/utils": "npm:2.0.0-beta.11" - p-limit: "npm:^5.0.0" + "@vitest/utils": "npm:2.0.4" pathe: "npm:^1.1.2" - checksum: 10/e5d5e035ce1ed6f03be49d5f61422969cb9944c65e71bae73109a20ae07a0ea5a294f2fe7619e8a8d7b67eade769bf12f44ea1c430b592ed7a959e2650219329 + checksum: 10/a94872a08296b72316d1259fa8f12e314a47614b614cba03f1d0ba7f00e82f5d724b740ab17b8f6ddbe281acea278dec212f5050ac557b108df8f50b7aab6cbd languageName: node linkType: hard -"@vitest/snapshot@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/snapshot@npm:2.0.0-beta.11" +"@vitest/snapshot@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/snapshot@npm:2.0.4" dependencies: + "@vitest/pretty-format": "npm:2.0.4" magic-string: "npm:^0.30.10" pathe: "npm:^1.1.2" - pretty-format: "npm:^29.7.0" - checksum: 10/6e01b6fc89722e1708b3dca7a4373e5f46c1f5011444ab52a97c75d68c59e344e3154e795d2a18956e2cb810c517355eae42557dcd97503cb13ee20277f098e3 + checksum: 10/bbdc491d42a95945589a7006ef40beb199332b28b5832f111bd25e26b24bd78134efdb05b670e65dc82f83c654e1aedc445c26be20bdaa758a6c3cf844bd05b5 languageName: node linkType: hard -"@vitest/spy@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/spy@npm:2.0.0-beta.11" +"@vitest/spy@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/spy@npm:2.0.4" dependencies: tinyspy: "npm:^3.0.0" - checksum: 10/8de1f2fc63a543532986fb32f2b5703d732e4acb747cf55aa6f7944b028b28bf8702154980f431399e76f178747719fd26ac33934ca4716991b28ae2bbd49430 + checksum: 
10/c18d0fc28e40a40f701a116a117d98916ec90f18e1643a37379b18f5fbee841e7c35fcb65202503506b471df761e0907053912d475e159399b887c1be6f91ef1 languageName: node linkType: hard -"@vitest/ui@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/ui@npm:2.0.0-beta.11" +"@vitest/ui@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/ui@npm:2.0.4" dependencies: - "@vitest/utils": "npm:2.0.0-beta.11" + "@vitest/utils": "npm:2.0.4" fast-glob: "npm:^3.3.2" fflate: "npm:^0.8.2" flatted: "npm:^3.3.1" pathe: "npm:^1.1.2" - picocolors: "npm:^1.0.1" sirv: "npm:^2.0.4" + tinyrainbow: "npm:^1.2.0" peerDependencies: - vitest: 2.0.0-beta.11 - checksum: 10/995e994051c2d722649a378a994cffba53b476874c782e9e27aee033bf04d46c7f4e7cb23e362b177efd7848025ba4aad458663677e4af5e975f369893c5a97e + vitest: 2.0.4 + checksum: 10/88996d4650889ab9cd2a539fa5651dd45f4236cd087c887fae592f243ed2645506e3faaaeac752f35f581557593781db10cc144b61b313d84ca2f2d1e3c0401c languageName: node linkType: hard -"@vitest/utils@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "@vitest/utils@npm:2.0.0-beta.11" +"@vitest/utils@npm:2.0.4": + version: 2.0.4 + resolution: "@vitest/utils@npm:2.0.4" dependencies: - diff-sequences: "npm:^29.6.3" + "@vitest/pretty-format": "npm:2.0.4" estree-walker: "npm:^3.0.3" loupe: "npm:^3.1.1" - pretty-format: "npm:^29.7.0" - checksum: 10/6fc7d417363b4eb644c205f8944c4a2601d21553d8539856ada1654ea48c684ce7e473dff3b06e2edf5c16691bfdac0787362d31aac80292e67de65551ca2635 + tinyrainbow: "npm:^1.2.0" + checksum: 10/a17497cd3c12b72b315bda6a6a4addcbc206367f6bcdedb83d5d708ac40cf52fcc48403539d10528e1893348b2f107416e9065b6b5c39329f2512eea8f104578 languageName: node linkType: hard @@ -2975,13 +2957,6 @@ __metadata: languageName: node linkType: hard -"ansi-styles@npm:^5.0.0": - version: 5.2.0 - resolution: "ansi-styles@npm:5.2.0" - checksum: 10/d7f4e97ce0623aea6bc0d90dcd28881ee04cba06c570b97fd3391bd7a268eedfd9d5e2dd4fdcbdd82b8105df5faf6f24aaedc08eaf3da898e702db5948f63469 - languageName: node - linkType: hard - "ansi-styles@npm:^6.0.0, ansi-styles@npm:^6.1.0, ansi-styles@npm:^6.2.1": version: 6.2.1 resolution: "ansi-styles@npm:6.2.1" @@ -4083,13 +4058,6 @@ __metadata: languageName: node linkType: hard -"diff-sequences@npm:^29.6.3": - version: 29.6.3 - resolution: "diff-sequences@npm:29.6.3" - checksum: 10/179daf9d2f9af5c57ad66d97cb902a538bcf8ed64963fa7aa0c329b3de3665ce2eb6ffdc2f69f29d445fa4af2517e5e55e5b6e00c00a9ae4f43645f97f7078cb - languageName: node - linkType: hard - "diff@npm:^5.0.0": version: 5.2.0 resolution: "diff@npm:5.2.0" @@ -6263,14 +6231,14 @@ __metadata: languageName: node linkType: hard -"istanbul-lib-source-maps@npm:^5.0.4": - version: 5.0.4 - resolution: "istanbul-lib-source-maps@npm:5.0.4" +"istanbul-lib-source-maps@npm:^5.0.6": + version: 5.0.6 + resolution: "istanbul-lib-source-maps@npm:5.0.6" dependencies: "@jridgewell/trace-mapping": "npm:^0.3.23" debug: "npm:^4.1.1" istanbul-lib-coverage: "npm:^3.0.0" - checksum: 10/e6f9fedab9c047d0ca1e58bf1697c3d7478e77271e5cd55b01e425dcdfc99534f54c6dfb981d5746e9a69b2697009f907d4c4f02f4000d66f22164a7610e6aa2 + checksum: 10/569dd0a392ee3464b1fe1accbaef5cc26de3479eacb5b91d8c67ebb7b425d39fd02247d85649c3a0e9c29b600809fa60b5af5a281a75a89c01f385b1e24823a2 languageName: node linkType: hard @@ -6320,13 +6288,6 @@ __metadata: languageName: node linkType: hard -"js-tokens@npm:^9.0.0": - version: 9.0.0 - resolution: "js-tokens@npm:9.0.0" - checksum: 
10/65e7a55a1a18d61f1cf94bfd7704da870b74337fa08d4c58118e69a8b10225b5ad887ff3ae595d720301b0924811a9b0594c679621a85ecbac6e3aac8533c53b - languageName: node - linkType: hard - "js-yaml@npm:^4.1.0": version: 4.1.0 resolution: "js-yaml@npm:4.1.0" @@ -7143,9 +7104,9 @@ __metadata: languageName: node linkType: hard -"micromark-core-commonmark@npm:^2.0.0": - version: 2.0.0 - resolution: "micromark-core-commonmark@npm:2.0.0" +"micromark-core-commonmark@npm:2.0.1, micromark-core-commonmark@npm:^2.0.0": + version: 2.0.1 + resolution: "micromark-core-commonmark@npm:2.0.1" dependencies: decode-named-character-reference: "npm:^1.0.0" devlop: "npm:^1.0.0" @@ -7163,7 +7124,7 @@ __metadata: micromark-util-subtokenize: "npm:^2.0.0" micromark-util-symbol: "npm:^2.0.0" micromark-util-types: "npm:^2.0.0" - checksum: 10/67f6e2f062f42a7ae21e8a409f3663843703a830ff27cf0f41cb0fb712c58e55409db428531d8124c4ef8d698cd81e7eb41485d24b8c352d2f0c06b535865367 + checksum: 10/15e788b3222401572ff8f549f8ecba21fa3395c000b8005e47204e8c97200e98bb0652c2c648e357b0996f1b50a7a63cc43e849f2976e4845b4453049040f8cc languageName: node linkType: hard @@ -7557,14 +7518,14 @@ __metadata: linkType: hard "micromark-util-subtokenize@npm:^2.0.0": - version: 2.0.0 - resolution: "micromark-util-subtokenize@npm:2.0.0" + version: 2.0.1 + resolution: "micromark-util-subtokenize@npm:2.0.1" dependencies: devlop: "npm:^1.0.0" micromark-util-chunked: "npm:^2.0.0" micromark-util-symbol: "npm:^2.0.0" micromark-util-types: "npm:^2.0.0" - checksum: 10/4d209894f9400ff73e093a4ce3d13870cd1f546b47e50355f849c4402cecd5d2039bd63bb624f2a09aaeba01a847634088942edb42f141e4869b3a85281cf64e + checksum: 10/8e1cae8859bcc3eed54c0dc896d9c2141c990299696455124205ce538e084caeaafcbe0d459a39b81cd45e761ff874d773dbf235ab6825914190701a15226789 languageName: node linkType: hard @@ -8170,15 +8131,6 @@ __metadata: languageName: node linkType: hard -"p-limit@npm:^5.0.0": - version: 5.0.0 - resolution: "p-limit@npm:5.0.0" - dependencies: - yocto-queue: "npm:^1.0.0" - checksum: 10/87bf5837dee6942f0dbeff318436179931d9a97848d1b07dbd86140a477a5d2e6b90d9701b210b4e21fe7beaea2979dfde366e4f576fa644a59bd4d6a6371da7 - languageName: node - linkType: hard - "p-locate@npm:^4.1.0": version: 4.1.0 resolution: "p-locate@npm:4.1.0" @@ -8386,7 +8338,7 @@ __metadata: languageName: node linkType: hard -"picocolors@npm:^1.0.0, picocolors@npm:^1.0.1": +"picocolors@npm:^1.0.0": version: 1.0.1 resolution: "picocolors@npm:1.0.1" checksum: 10/fa68166d1f56009fc02a34cdfd112b0dd3cf1ef57667ac57281f714065558c01828cdf4f18600ad6851cbe0093952ed0660b1e0156bddf2184b6aaf5817553a5 @@ -8487,17 +8439,6 @@ __metadata: languageName: node linkType: hard -"pretty-format@npm:^29.7.0": - version: 29.7.0 - resolution: "pretty-format@npm:29.7.0" - dependencies: - "@jest/schemas": "npm:^29.6.3" - ansi-styles: "npm:^5.0.0" - react-is: "npm:^18.0.0" - checksum: 10/dea96bc83c83cd91b2bfc55757b6b2747edcaac45b568e46de29deee80742f17bc76fe8898135a70d904f4928eafd8bb693cd1da4896e8bdd3c5e82cadf1d2bb - languageName: node - linkType: hard - "proc-log@npm:^3.0.0": version: 3.0.0 resolution: "proc-log@npm:3.0.0" @@ -8567,13 +8508,6 @@ __metadata: languageName: node linkType: hard -"react-is@npm:^18.0.0": - version: 18.2.0 - resolution: "react-is@npm:18.2.0" - checksum: 10/200cd65bf2e0be7ba6055f647091b725a45dd2a6abef03bf2380ce701fd5edccee40b49b9d15edab7ac08a762bf83cb4081e31ec2673a5bfb549a36ba21570df - languageName: node - linkType: hard - "read-package-json-fast@npm:^3.0.0, read-package-json-fast@npm:^3.0.2": version: 3.0.2 resolution: 
"read-package-json-fast@npm:3.0.2" @@ -9517,15 +9451,6 @@ __metadata: languageName: node linkType: hard -"strip-literal@npm:^2.1.0": - version: 2.1.0 - resolution: "strip-literal@npm:2.1.0" - dependencies: - js-tokens: "npm:^9.0.0" - checksum: 10/21c813aa1e669944e7e2318c8c927939fb90b0c52f53f57282bfc3dd6e19d53f70004f1f1693e33e5e790ad5ef102b0fce2b243808229d1ce07ae71f326c0e82 - languageName: node - linkType: hard - "supports-color@npm:^5.3.0": version: 5.5.0 resolution: "supports-color@npm:5.5.0" @@ -9661,6 +9586,13 @@ __metadata: languageName: node linkType: hard +"tinyrainbow@npm:^1.2.0": + version: 1.2.0 + resolution: "tinyrainbow@npm:1.2.0" + checksum: 10/2924444db6804355e5ba2b6e586c7f77329d93abdd7257a069a0f4530dff9f16de484e80479094e3f39273462541b003a65ee3a6afc2d12555aa745132deba5d + languageName: node + linkType: hard + "tinyspy@npm:^3.0.0": version: 3.0.0 resolution: "tinyspy@npm:3.0.0" @@ -10280,29 +10212,29 @@ __metadata: languageName: node linkType: hard -"vfile@npm:6.0.1, vfile@npm:^6.0.0, vfile@npm:^6.0.1": - version: 6.0.1 - resolution: "vfile@npm:6.0.1" +"vfile@npm:6.0.2, vfile@npm:^6.0.0, vfile@npm:^6.0.1": + version: 6.0.2 + resolution: "vfile@npm:6.0.2" dependencies: "@types/unist": "npm:^3.0.0" unist-util-stringify-position: "npm:^4.0.0" vfile-message: "npm:^4.0.0" - checksum: 10/7f8412f9ce7709d3be4041fd68a159e2cf96f9c9a4f095bcb18d1561009757b8efb37b71d0ae087e5202fe0e3b3162aae0adf92e30e2448a45645912c23c4ab2 + checksum: 10/8c36b4887b071aa9215a16c96916e96e75f3f3516cb87fa7ba1ec79fda3a1d87b66068e56b73f01c249b8fefa897dc52e3a6c736fd1053133ad3920f33482756 languageName: node linkType: hard -"vite-node@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "vite-node@npm:2.0.0-beta.11" +"vite-node@npm:2.0.4": + version: 2.0.4 + resolution: "vite-node@npm:2.0.4" dependencies: cac: "npm:^6.7.14" debug: "npm:^4.3.5" pathe: "npm:^1.1.2" - picocolors: "npm:^1.0.1" + tinyrainbow: "npm:^1.2.0" vite: "npm:^5.0.0" bin: vite-node: vite-node.mjs - checksum: 10/2069bc30851b86f3cf6af3cf9a66dbc1ac519e3436f37cef21b67530ef8a76a8f9221e6a87d18ee3fc721614132220049a1f2f60a0f3c83ae22f00c51f2755d7 + checksum: 10/27040a5d614fa315cc735867d7e6778640b2dcfb164e1a18d6a275b991a21e99ac6d720448b1b8de6e6d10b8169e79d0cb022807d537246b816f0260eb5f8b15 languageName: node linkType: hard @@ -10362,33 +10294,34 @@ __metadata: languageName: node linkType: hard -"vitest@npm:2.0.0-beta.11": - version: 2.0.0-beta.11 - resolution: "vitest@npm:2.0.0-beta.11" +"vitest@npm:2.0.4": + version: 2.0.4 + resolution: "vitest@npm:2.0.4" dependencies: "@ampproject/remapping": "npm:^2.3.0" - "@vitest/expect": "npm:2.0.0-beta.11" - "@vitest/runner": "npm:2.0.0-beta.11" - "@vitest/snapshot": "npm:2.0.0-beta.11" - "@vitest/spy": "npm:2.0.0-beta.11" - "@vitest/utils": "npm:2.0.0-beta.11" + "@vitest/expect": "npm:2.0.4" + "@vitest/pretty-format": "npm:^2.0.4" + "@vitest/runner": "npm:2.0.4" + "@vitest/snapshot": "npm:2.0.4" + "@vitest/spy": "npm:2.0.4" + "@vitest/utils": "npm:2.0.4" chai: "npm:^5.1.1" debug: "npm:^4.3.5" execa: "npm:^8.0.1" magic-string: "npm:^0.30.10" pathe: "npm:^1.1.2" - picocolors: "npm:^1.0.1" std-env: "npm:^3.7.0" tinybench: "npm:^2.8.0" tinypool: "npm:^1.0.0" + tinyrainbow: "npm:^1.2.0" vite: "npm:^5.0.0" - vite-node: "npm:2.0.0-beta.11" - why-is-node-running: "npm:^2.2.2" + vite-node: "npm:2.0.4" + why-is-node-running: "npm:^2.3.0" peerDependencies: "@edge-runtime/vm": "*" "@types/node": ^18.0.0 || >=20.0.0 - "@vitest/browser": 2.0.0-beta.11 - "@vitest/ui": 2.0.0-beta.11 + "@vitest/browser": 2.0.4 + "@vitest/ui": 
2.0.4 happy-dom: "*" jsdom: "*" peerDependenciesMeta: @@ -10406,7 +10339,7 @@ __metadata: optional: true bin: vitest: vitest.mjs - checksum: 10/a6c671be2471b66262f735f236058ca60aeb0de26ce592f750bca2dbd5bab1b20d77c3a2fa2cd796c3a71ceb58558f9ad7fc71f803d36bb4c78e970d31e35d0f + checksum: 10/01a173adbf40273adce5ff0ffd7b538fcc98286b15441651be4a3b9cc48748acf6cedb1f4966b4eff07ed91695847b9352591fd419c2da62181440bc6edf79ee languageName: node linkType: hard @@ -10468,15 +10401,15 @@ __metadata: languageName: node linkType: hard -"why-is-node-running@npm:^2.2.2": - version: 2.2.2 - resolution: "why-is-node-running@npm:2.2.2" +"why-is-node-running@npm:^2.3.0": + version: 2.3.0 + resolution: "why-is-node-running@npm:2.3.0" dependencies: siginfo: "npm:^2.0.0" stackback: "npm:0.0.2" bin: why-is-node-running: cli.js - checksum: 10/f3582e0337f4b25537d492b1d40f00b978ce04b1d1eeea8f310bfa8aae8a7d11d118d672e2f0760c164ce3753a620a70aa29ff3620e340197624940cf9c08615 + checksum: 10/0de6e6cd8f2f94a8b5ca44e84cf1751eadcac3ebedcdc6e5fbbe6c8011904afcbc1a2777c53496ec02ced7b81f2e7eda61e76bf8262a8bc3ceaa1f6040508051 languageName: node linkType: hard From a844b1a6c933cd77191563b09020b82f3fffb897 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 12:27:41 +0000 Subject: [PATCH 2/2] build(deps-dev): Bump typescript-eslint from 8.0.0-alpha.30 to 8.0.1 Bumps [typescript-eslint](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/typescript-eslint) from 8.0.0-alpha.30 to 8.0.1. - [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases) - [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/typescript-eslint/CHANGELOG.md) - [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.0.1/packages/typescript-eslint) --- updated-dependencies: - dependency-name: typescript-eslint dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- package.json | 2 +- yarn.lock | 116 +++++++++++++++++++++++++-------------------------- 2 files changed, 59 insertions(+), 59 deletions(-) diff --git a/package.json b/package.json index 85246b4..9d9e005 100644 --- a/package.json +++ b/package.json @@ -147,7 +147,7 @@ "trash-cli": "5.0.0", "ts-dedent": "2.2.0", "typescript": "5.5.2", - "typescript-eslint": "8.0.0-alpha.30", + "typescript-eslint": "8.0.1", "vfile": "6.0.2", "vite-tsconfig-paths": "4.3.2", "vitest": "2.0.4", diff --git a/yarn.lock b/yarn.lock index 3b73000..a49d04e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1785,7 +1785,7 @@ __metadata: trash-cli: "npm:5.0.0" ts-dedent: "npm:2.2.0" typescript: "npm:5.5.2" - typescript-eslint: "npm:8.0.0-alpha.30" + typescript-eslint: "npm:8.0.1" vfile: "npm:6.0.2" vite-tsconfig-paths: "npm:4.3.2" vitest: "npm:2.0.4" @@ -2514,15 +2514,15 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/eslint-plugin@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/eslint-plugin@npm:8.0.0-alpha.30" +"@typescript-eslint/eslint-plugin@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/eslint-plugin@npm:8.0.1" dependencies: "@eslint-community/regexpp": "npm:^4.10.0" - "@typescript-eslint/scope-manager": "npm:8.0.0-alpha.30" - "@typescript-eslint/type-utils": "npm:8.0.0-alpha.30" - "@typescript-eslint/utils": "npm:8.0.0-alpha.30" - "@typescript-eslint/visitor-keys": "npm:8.0.0-alpha.30" + "@typescript-eslint/scope-manager": "npm:8.0.1" + "@typescript-eslint/type-utils": "npm:8.0.1" + "@typescript-eslint/utils": "npm:8.0.1" + "@typescript-eslint/visitor-keys": "npm:8.0.1" graphemer: "npm:^1.4.0" ignore: "npm:^5.3.1" natural-compare: "npm:^1.4.0" @@ -2533,25 +2533,25 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 10/8d38f0d350d1a78ae4ab0295c1891ee4dd9a4a5287b9f7cc3d8399e731d4e4d96abf947166293af9c9221291ca6d9937e5427b3d44b6d98005c1ddb5e1ab4a14 + checksum: 10/eceb49205734a2838734b11f5c6e0bdea807859426d8bef6fbd6eebcf3df389c7ff31114ad9caf3a440ea36a62d44dd4ca8c0313a57eeccce194d28da7fbe7c2 languageName: node linkType: hard -"@typescript-eslint/parser@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/parser@npm:8.0.0-alpha.30" +"@typescript-eslint/parser@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/parser@npm:8.0.1" dependencies: - "@typescript-eslint/scope-manager": "npm:8.0.0-alpha.30" - "@typescript-eslint/types": "npm:8.0.0-alpha.30" - "@typescript-eslint/typescript-estree": "npm:8.0.0-alpha.30" - "@typescript-eslint/visitor-keys": "npm:8.0.0-alpha.30" + "@typescript-eslint/scope-manager": "npm:8.0.1" + "@typescript-eslint/types": "npm:8.0.1" + "@typescript-eslint/typescript-estree": "npm:8.0.1" + "@typescript-eslint/visitor-keys": "npm:8.0.1" debug: "npm:^4.3.4" peerDependencies: eslint: ^8.57.0 || ^9.0.0 peerDependenciesMeta: typescript: optional: true - checksum: 10/7997ede700b98581e74160e9596b60985acffd2cbc5b9bf790dd10a2d001adc837dd18978a8923173df7f7f9d928290e9ef8aa42146e7d65ffae23ea4bc85c43 + checksum: 10/d483e236d13e40f00cb6590b956caee9ea4a68c0bc338aad7463a0183e1983d08d1a31b5f9107641d06cd7bcc55d5b7cb7d48d42d9cf3c4996573d798128ec46 languageName: node linkType: hard @@ -2565,28 +2565,28 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/scope-manager@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/scope-manager@npm:8.0.0-alpha.30" +"@typescript-eslint/scope-manager@npm:8.0.1": + version: 
8.0.1 + resolution: "@typescript-eslint/scope-manager@npm:8.0.1" dependencies: - "@typescript-eslint/types": "npm:8.0.0-alpha.30" - "@typescript-eslint/visitor-keys": "npm:8.0.0-alpha.30" - checksum: 10/9ca4c7ef13e3f68b829a8446d8d2bae150d87dd2c2ca3dea44f8bd22056ba6d13c6f69e8d9891712d94410e7e365cef79a4d05fa33bd0665cd96dcfe508c3143 + "@typescript-eslint/types": "npm:8.0.1" + "@typescript-eslint/visitor-keys": "npm:8.0.1" + checksum: 10/e4509f69390dd51f87e9a998d96047330cb1d23262fdc6f4cf7c9475e10faf0a85cc19324d1a51102fcda5dbef5621395336177d55de7e1fe8a222e1823b9a43 languageName: node linkType: hard -"@typescript-eslint/type-utils@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/type-utils@npm:8.0.0-alpha.30" +"@typescript-eslint/type-utils@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/type-utils@npm:8.0.1" dependencies: - "@typescript-eslint/typescript-estree": "npm:8.0.0-alpha.30" - "@typescript-eslint/utils": "npm:8.0.0-alpha.30" + "@typescript-eslint/typescript-estree": "npm:8.0.1" + "@typescript-eslint/utils": "npm:8.0.1" debug: "npm:^4.3.4" ts-api-utils: "npm:^1.3.0" peerDependenciesMeta: typescript: optional: true - checksum: 10/4f69cd52e1ead98a5a09d75ff1b1ad2848f937a0c7bd3a777ee155354e44c99f76548337c3c9f5db4a843ea0b6cab3d4bd95ef84be8bf9ee3359ba3c02d60931 + checksum: 10/228a6bfc9c81d2acadab28dd968d43477507d7811a3cef2755003c1b61a17e579ca1fc53ad9b18bedf08591c70bf5e443a8c7e85a7228ee3e7d16c908b1b3be8 languageName: node linkType: hard @@ -2597,10 +2597,10 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/types@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/types@npm:8.0.0-alpha.30" - checksum: 10/2cd0ee0258cd0c61aa1475944e4545d7a982e6a6aa7606f1328ff3253fd368ef9799e6dd60a81ec61101214d30f5ee2254abbcdaec2b671352b9cd5908f9cba8 +"@typescript-eslint/types@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/types@npm:8.0.1" + checksum: 10/821ed735ff34da599315eadc3145898f02d5fea850979ed5b27648be0c025fdca3a6f8965f31dc290425eeda7c320d278ac60838f43580dc0173bd6be384051a languageName: node linkType: hard @@ -2623,12 +2623,12 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/typescript-estree@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/typescript-estree@npm:8.0.0-alpha.30" +"@typescript-eslint/typescript-estree@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/typescript-estree@npm:8.0.1" dependencies: - "@typescript-eslint/types": "npm:8.0.0-alpha.30" - "@typescript-eslint/visitor-keys": "npm:8.0.0-alpha.30" + "@typescript-eslint/types": "npm:8.0.1" + "@typescript-eslint/visitor-keys": "npm:8.0.1" debug: "npm:^4.3.4" globby: "npm:^11.1.0" is-glob: "npm:^4.0.3" @@ -2638,21 +2638,21 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 10/cbd7821fd2500afbb1bcd61afcf39950288c95479112c34dfc4d3d866a261c1816baba22330866873eedc6a37af559e7fcfdac2429438350fd6da1ad11ddba2e + checksum: 10/f0888381faaf6f1394adec1286c606dc41d8e27f1591d3fb20750c17e236f282627bf6c18b1ba34bf97e9af03f99b6e4b10a7625f615496cd506595da0c21186 languageName: node linkType: hard -"@typescript-eslint/utils@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/utils@npm:8.0.0-alpha.30" +"@typescript-eslint/utils@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/utils@npm:8.0.1" dependencies: "@eslint-community/eslint-utils": "npm:^4.4.0" - "@typescript-eslint/scope-manager": "npm:8.0.0-alpha.30" - 
"@typescript-eslint/types": "npm:8.0.0-alpha.30" - "@typescript-eslint/typescript-estree": "npm:8.0.0-alpha.30" + "@typescript-eslint/scope-manager": "npm:8.0.1" + "@typescript-eslint/types": "npm:8.0.1" + "@typescript-eslint/typescript-estree": "npm:8.0.1" peerDependencies: eslint: ^8.57.0 || ^9.0.0 - checksum: 10/51ad6d641b76aeb14ccb8396eef3e9828413e56b949ab922291f7144c9c019d28ebc11e9b08b9d825abfb45350dedeff4aad501280d4f7508cb8468584048372 + checksum: 10/e359a9e95d0b3f8dbccc3681607748f96b332667a882a5635a9876814159b8a723da7138f7fd890cf0c414c46257a8362d5a55a3bad78bc37743ee814c7a8de0 languageName: node linkType: hard @@ -2680,13 +2680,13 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/visitor-keys@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "@typescript-eslint/visitor-keys@npm:8.0.0-alpha.30" +"@typescript-eslint/visitor-keys@npm:8.0.1": + version: 8.0.1 + resolution: "@typescript-eslint/visitor-keys@npm:8.0.1" dependencies: - "@typescript-eslint/types": "npm:8.0.0-alpha.30" + "@typescript-eslint/types": "npm:8.0.1" eslint-visitor-keys: "npm:^3.4.3" - checksum: 10/7700cfd08808b204319a914d6089510626693e12c73ce9a80aa5e10ff459908fad048fb4b3dceedd8fff893817375ed48e69b163cd7beb5fd5edaa34df904077 + checksum: 10/489da338e19422eadb3b29fcf4d594ed00534faa129f52419bf9eb5733b0a1c11198d18e8d089fa0cc204370c2d2dd1834157a183d1e3e94df41378c5a606545 languageName: node linkType: hard @@ -9821,17 +9821,17 @@ __metadata: languageName: node linkType: hard -"typescript-eslint@npm:8.0.0-alpha.30": - version: 8.0.0-alpha.30 - resolution: "typescript-eslint@npm:8.0.0-alpha.30" +"typescript-eslint@npm:8.0.1": + version: 8.0.1 + resolution: "typescript-eslint@npm:8.0.1" dependencies: - "@typescript-eslint/eslint-plugin": "npm:8.0.0-alpha.30" - "@typescript-eslint/parser": "npm:8.0.0-alpha.30" - "@typescript-eslint/utils": "npm:8.0.0-alpha.30" + "@typescript-eslint/eslint-plugin": "npm:8.0.1" + "@typescript-eslint/parser": "npm:8.0.1" + "@typescript-eslint/utils": "npm:8.0.1" peerDependenciesMeta: typescript: optional: true - checksum: 10/cf034b7fd5dfdaf4075c90937dc69f601b68047c9aa589c0cf2acb2adfd01f454e9c4b92f1547bf046d805244ffa80e61c53212a96c3db19014fb7c1160cefae + checksum: 10/441f41ac8657e3a796f9acc5c47b57098124b5b8203f5b20d9a954980101fddce5039851f5919da490459132eb69c1bac643d3671bc50dea36dd20f2d3a654d1 languageName: node linkType: hard