ci(deps): Bump actions/add-to-project from 1.0.1 to 1.0.2 #12

Open · wants to merge 2 commits into main
2 changes: 0 additions & 2 deletions .github/infrastructure.yml
@@ -39,8 +39,6 @@ branches:
- context: test (18)
- context: test (19)
- context: test (20)
- context: typescript (5.3.3)
- context: typescript (5.4.5)
- context: typescript (5.5.2)
- context: typescript (latest)
strict: true
2 changes: 1 addition & 1 deletion .github/workflows/add-to-project.yml
@@ -39,7 +39,7 @@ jobs:
private-key: ${{ secrets.BOT_PRIVATE_KEY }}
- id: add-item
name: Add ${{ format('#{0}', github.event.number) }} to project
uses: actions/add-to-project@v1.0.1
uses: actions/add-to-project@v1.0.2
with:
github-token: ${{ steps.bot-token.outputs.token }}
project-url: |
2 changes: 0 additions & 2 deletions .github/workflows/ci.yml
@@ -245,8 +245,6 @@ jobs:
matrix:
typescript-version:
- ${{ needs.preflight.outputs.version-typescript }}
- 5.4.5
- 5.3.3
- latest
steps:
- id: checkout
20 changes: 12 additions & 8 deletions src/__tests__/tokenize.integration.spec.ts
@@ -9,6 +9,7 @@ import type { Options } from '#src/interfaces'
import { inlineTag, numeric, punctuator, string, ws } from '#tests/constructs'
import token from '#tests/utils/token'
import { identity } from '@flex-development/tutils'
import { codes } from '@flex-development/vfile-reader'
import { readSync as read } from 'to-vfile'
import type { VFile, Value } from 'vfile'
import type Lexer from '../lexer'
@@ -47,18 +48,21 @@ describe('integration:tokenize', () => {
describe('non-empty file', () => {
it.each<[VFile, (Partial<Options> | null | undefined)?]>([
[read('__fixtures__/inline-tag.txt'), {
constructs: [inlineTag, ws],
constructs: {
[codes.cr]: ws,
[codes.leftBrace]: inlineTag,
[codes.lf]: ws,
[codes.space]: ws
},
context: vi.fn(),
disabled: [tk.whitespace],
initialize: {
name: initialize.name,
resolveAll: vi.fn(identity),
tokenize: initialize.tokenize
}
disabled: [tk.whitespace]
}],
[read('__fixtures__/hello.txt'), {
constructs: [string, punctuator],
context: vi.fn(identity)
context: vi.fn(identity),
initialize: Object.assign(initialize([string, punctuator]), {
resolveAll: vi.fn(identity)
})
}],
[read('__fixtures__/strings.txt'), {
constructs: [string, punctuator]
120 changes: 43 additions & 77 deletions src/constructs/initialize.ts
@@ -4,98 +4,64 @@
*/

import { tt } from '#src/enums'
import type {
Construct,
InitialConstruct,
TokenizeContext
} from '#src/interfaces'
import type { Effects, State } from '#src/types'
import type { Code } from '@flex-development/vfile-reader'
import type { InitialConstruct, TokenizeContext } from '#src/interfaces'
import type { Constructs, Effects, State } from '#src/types'
import { codes, type Code } from '@flex-development/vfile-reader'
import eof from './eof'

/**
* Initialization construct.
* Create an initial construct.
*
* @const {InitialConstruct} initialize
* @see {@linkcode Constructs}
* @see {@linkcode InitialConstruct}
*
* @param {Constructs} constructs - Construct(s) to try
* @return {InitialConstruct} Initial construct
*/
const initialize: InitialConstruct = {
/**
* Construct name.
*/
name: 'vfile-lexer:initialize',

/**
* Set up a state machine to handle character codes streaming in.
*
* @see {@linkcode Effects}
* @see {@linkcode State}
* @see {@linkcode TokenizeContext}
*
* @this {TokenizeContext}
*
* @param {Effects} effects - Context object to transition state machine
* @return {State} Initial state
*/
tokenize(this: TokenizeContext, effects: Effects): State {
function initialize(constructs: Constructs): InitialConstruct {
return {
/**
* Tokenize context.
*
* @const {TokenizeContext} self
* Construct name.
*/
const self: TokenizeContext = this
name: 'vfile-lexer:initialize',

/**
* List of constructs.
* Set up a state machine to handle character codes streaming in.
*
* @const {Construct[]} constructs
*/
const constructs: Construct[] = [eof, ...self.constructs]

/**
* Try to tokenize a list of constructs.
* @see {@linkcode Effects}
* @see {@linkcode State}
* @see {@linkcode TokenizeContext}
*
* @var {State} state
*/
let state: State = effects.attempt(constructs, succ, fail)

void (effects.enter(tt.sof), effects.exit(tt.sof))
return succ

/**
* Eat `code`.
* @this {TokenizeContext}
*
* @param {Code} code - Current character code
* @return {State | undefined} Next state
* @param {Effects} effects - Context object to transition state machine
* @return {State} Initial state
*/
function eat(code: Code): State | undefined {
return effects.consume(code), state
}
tokenize(this: TokenizeContext, effects: Effects): State {
void (effects.enter(tt.sof), effects.exit(tt.sof))
return state

/**
* Try tokenizing the next construct, and move onto the next character code
* if all constructs fail.
*
* @param {Code} code - Current character code
* @return {State | undefined} Next state
*/
function fail(code: Code): State | undefined {
return effects.attempt(
constructs,
succ,
constructs.indexOf(self.construct!) === constructs.length - 1
? eat
: fail
)(code)
}
/**
* Consume `code` and try tokenizing the next construct.
*
* @param {Code} code - Current character code
* @return {State | undefined} Next state
*/
function eat(code: Code): State | undefined {
return code === codes.eof
? effects.attempt(eof)(code)
: (effects.consume(code), state)
}

/**
* Try tokenizing the next construct.
*
* @param {Code} code - Current character code
* @return {State | undefined} Next state
*/
function succ(code: Code): State | undefined {
return (state = effects.attempt(constructs, succ, fail))(code)
/**
* Try to tokenize a construct.
*
* @param {Code} code - Current character code
* @return {State | undefined} Next state
*/
function state(code: Code): State | undefined {
return effects.attempt(constructs, state, eat)(code)
}
}
}
}
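With this change, `initialize` is no longer a ready-made `InitialConstruct` but a factory that builds one from the construct(s) it should try. A minimal sketch of how a caller might consume it, based on the updated integration test above; the import paths and the exact option shape are assumptions, and `string`/`punctuator` come from the test helpers:

```ts
import initialize from '#src/constructs/initialize'
import type { Options } from '#src/interfaces'
import { punctuator, string } from '#tests/constructs'

// Build an initial construct that tries `string` and `punctuator`.
// The returned object keeps the 'vfile-lexer:initialize' name and its
// `tokenize` method; extra fields (e.g. `resolveAll`) can be layered on
// with `Object.assign`, as the integration test does.
const options: Partial<Options> = {
  constructs: [string, punctuator],
  initialize: initialize([string, punctuator])
}
```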
7 changes: 3 additions & 4 deletions src/interfaces/__tests__/options.spec-d.ts
@@ -3,18 +3,17 @@
* @module vfile-lexer/interfaces/tests/unit-d/Options
*/

import type { FinalizeContext, TokenFactory } from '#src/types'
import type { Constructs, FinalizeContext, TokenFactory } from '#src/types'
import type { Nilable } from '@flex-development/tutils'
import type { Point } from '@flex-development/vfile-reader'
import type Construct from '../construct'
import type InitialConstruct from '../construct-initial'
import type TestSubject from '../options'

describe('unit-d:interfaces/Options', () => {
it('should match [constructs?: readonly Construct[] | null | undefined]', () => {
it('should match [constructs?: Constructs | null | undefined]', () => {
expectTypeOf<TestSubject>()
.toHaveProperty('constructs')
.toEqualTypeOf<Nilable<readonly Construct[]>>()
.toEqualTypeOf<Nilable<Constructs>>()
})

it('should match [context?: FinalizeContext | null | undefined]', () => {
10 changes: 2 additions & 8 deletions src/interfaces/__tests__/tokenize-context.spec-d.ts
@@ -25,18 +25,12 @@ describe('unit-d:interfaces/TokenizeContext', () => {
expectTypeOf<TestSubject>().toHaveProperty('code').toEqualTypeOf<Code>()
})

it('should match [construct?: Construct | null | undefined]', () => {
it('should match [currentConstruct?: Construct | null | undefined]', () => {
expectTypeOf<TestSubject>()
.toHaveProperty('construct')
.toHaveProperty('currentConstruct')
.toEqualTypeOf<Nilable<Construct>>()
})

it('should match [constructs: readonly Construct[]]', () => {
expectTypeOf<TestSubject>()
.toHaveProperty('constructs')
.toEqualTypeOf<readonly Construct[]>()
})

it('should match [disabled: readonly string[]]', () => {
expectTypeOf<TestSubject>()
.toHaveProperty('disabled')
10 changes: 3 additions & 7 deletions src/interfaces/options.ts
@@ -3,12 +3,8 @@
* @module vfile-lexer/interfaces/Options
*/

import type {
FinalizeContext,
TokenFactory
} from '#src/types'
import type { Constructs, FinalizeContext, TokenFactory } from '#src/types'
import type { Point } from '@flex-development/vfile-reader'
import type Construct from './construct'
import type InitialConstruct from './construct-initial'

/**
@@ -18,9 +14,9 @@ interface Options {
/**
* Constructs.
*
* @see {@linkcode Construct}
* @see {@linkcode Constructs}
*/
constructs?: readonly Construct[] | null | undefined
constructs?: Constructs | null | undefined

/**
* Finalize the tokenization context.
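For context, the widened `constructs` option also accepts a record keyed by character code, as exercised in the updated integration test above. A hedged sketch; the exact shape of the `Constructs` union lives in `#src/types` and is not shown in this diff:

```ts
import type { Options } from '#src/interfaces'
import { inlineTag, ws } from '#tests/constructs'
import { codes } from '@flex-development/vfile-reader'

// Map character codes to the construct(s) to try when that code is seen.
const options: Partial<Options> = {
  constructs: {
    [codes.leftBrace]: inlineTag, // `{` starts an inline tag
    [codes.cr]: ws,               // carriage return
    [codes.lf]: ws,               // line feed
    [codes.space]: ws             // space
  }
}
```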
11 changes: 3 additions & 8 deletions src/interfaces/tokenize-context.ts
@@ -33,18 +33,13 @@ interface TokenizeContext {
get code(): Code

/**
* Current construct.
* The current construct.
*
* @see {@linkcode Construct}
*/
construct?: Construct | null | undefined

/**
* All constructs.
* Constructs that are not `partial` are set here.
*
* @see {@linkcode Construct}
*/
constructs: readonly Construct[]
currentConstruct?: Construct | null | undefined

/**
* Disabled construct names.
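The `construct` field is renamed to `currentConstruct`, and the separate `constructs` list is dropped from the context. A small hypothetical helper (not part of this package) showing how context consumers would read the renamed field; the import path is an assumption:

```ts
import type { TokenizeContext } from '#src/interfaces'

/**
 * Check if the context is currently running a construct with `name`.
 *
 * Hypothetical helper for illustration only. `currentConstruct` replaces
 * the old `construct` field and is only set for non-`partial` constructs.
 */
function isRunning(context: TokenizeContext, name: string): boolean {
  return context.currentConstruct?.name === name
}
```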