diff --git a/packages/css-tokenizer/test/css-tokenizer/at-keyword-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/at-keyword-token.test.ts
index 486648fe6..27c599d11 100644
--- a/packages/css-tokenizer/test/css-tokenizer/at-keyword-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/at-keyword-token.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('at-keyword-token', () => {
     test.each(addAsProp([
@@ -20,9 +18,5 @@ describe('at-keyword-token', () => {
             [TokenType.Semicolon, 16, 17],
         ],
     },
-    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/cdo-and-cdc-tokens.test.ts b/packages/css-tokenizer/test/css-tokenizer/cdo-and-cdc-tokens.test.ts
index 8bd70360e..46b209dfc 100644
--- a/packages/css-tokenizer/test/css-tokenizer/cdo-and-cdc-tokens.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/cdo-and-cdc-tokens.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import { type TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('cdo-token and cdc-token', () => {
     test.each(addAsProp([
@@ -17,9 +15,5 @@ describe('cdo-token and cdc-token', () => {
             [TokenType.Cdc, 0, 3],
         ],
     },
-    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/comment-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/comment-token.test.ts
index 160828907..480c1a671 100644
--- a/packages/css-tokenizer/test/css-tokenizer/comment-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/comment-token.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('comment-token', () => {
     test.each(addAsProp([
@@ -46,9 +44,5 @@ describe('comment-token', () => {
             [TokenType.Comment, 32, 44],
         ],
     },
-    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/delim-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/delim-token.test.ts
index ba84acf35..ba4b4312e 100644
--- a/packages/css-tokenizer/test/css-tokenizer/delim-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/delim-token.test.ts
@@ -1,7 +1,6 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import type { TokenTest } from '../helpers/test-interfaces';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('delim-token', () => {
     // Tokenize any unknown character as
@@ -19,11 +18,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
             ],
         },
-        ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
     });

     describe("should tokenize '#' as if it isn't followed by a name or a hex digit", () => {
@@ -49,11 +44,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Whitespace, 1, 2],
                 [TokenType.Number, 2, 3],
             ],
         },
-        ])("should tokenize '#' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '#' as in '$actual'", testTokenization);
     });

     describe("should tokenize '+' as if it isn't a part of a number", () => {
@@ -79,11 +70,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Whitespace, 1, 2],
                 [TokenType.Number, 2, 3],
             ],
         },
-        ])("should tokenize '+' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '+' as in '$actual'", testTokenization);
     });

     describe("should tokenize '-' as if it isn't a part of a number, CDC or ident", () => {
@@ -117,11 +104,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Whitespace, 1, 2],
                 [TokenType.Ident, 2, 3],
             ],
         },
-        ])("should tokenize '-' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '-' as in '$actual'", testTokenization);
     });

     describe("should tokenize '.' as if it isn't a part of a number", () => {
@@ -147,11 +130,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Whitespace, 1, 2],
                 [TokenType.Number, 2, 3],
             ],
         },
-        ])("should tokenize '.' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '.' as in '$actual'", testTokenization);
     });

     describe("should tokenize '<' as if it isn't a part of a CDO", () => {
@@ -178,11 +157,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Delim, 1, 2],
                 [TokenType.Whitespace, 2, 3],
                 [TokenType.Ident, 3, 5],
             ],
         },
-        ])("should tokenize '<' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '<' as in '$actual'", testTokenization);
     });

     describe("should tokenize '@' as if it isn't a part of an at-keyword", () => {
@@ -208,11 +183,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Whitespace, 1, 2],
                 [TokenType.Ident, 2, 9],
             ],
         },
-        ])("should tokenize '@' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '@' as in '$actual'", testTokenization);
     });

     describe("should tokenize '/' as if it isn't a part of a comment mark", () => {
@@ -238,11 +209,7 @@ describe('delim-token', () => {
                 [TokenType.Delim, 0, 1],
                 [TokenType.Whitespace, 1, 2],
                 [TokenType.Delim, 2, 3],
             ],
         },
-        ])("should tokenize '/' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '/' as in '$actual'", testTokenization);
     });

     describe("should tokenize '\\' as if it isn't a valid escape", () => {
@@ -254,10 +221,6 @@ describe('delim-token', () => {
                 [TokenType.Whitespace, 1, 2],
             ],
         },
-        ])("should tokenize '\\' as in '$actual'", ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ] as TokenTest[])("should tokenize '\\' as in '$actual'", testTokenization);
     });
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/hash-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/hash-token.test.ts
index ca3df9ffb..50621e518 100644
--- a/packages/css-tokenizer/test/css-tokenizer/hash-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/hash-token.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('hash-token', () => {
     test.each(addAsProp([
@@ -17,9 +15,5 @@ describe('hash-token', () => {
             [TokenType.Hash, 0, 10],
         ],
     },
-    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/numeric-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/numeric-token.test.ts
index abdb56692..63404f51f 100644
--- a/packages/css-tokenizer/test/css-tokenizer/numeric-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/numeric-token.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('numeric-token', () => {
     // Test data from https://developer.mozilla.org/en-US/docs/Web/CSS/number
@@ -63,9 +61,5 @@
             [TokenType.Dimension, 0, value.length + 3],
         ],
     }))),
-    ])('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ])('should tokenize \'$actual\' as \'$as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/string-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/string-token.test.ts
index d32d34664..f25184c79 100644
--- a/packages/css-tokenizer/test/css-tokenizer/string-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/string-token.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('string-token and bad-string-token', () => {
     test.each(addAsProp([
@@ -111,9 +109,5 @@ describe('string-token and bad-string-token', () => {
             [TokenType.String, 0, 10],
         ],
     },
-    ]))('should tokenize $actual as $as', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize $actual as $as', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/trivial-tokens.test.ts b/packages/css-tokenizer/test/css-tokenizer/trivial-tokens.test.ts
index 3a41a9c57..5a52fbc3a 100644
--- a/packages/css-tokenizer/test/css-tokenizer/trivial-tokens.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/trivial-tokens.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('trivial tokens', () => {
     test.each(addAsProp([
@@ -59,9 +57,5 @@ describe('trivial tokens', () => {
             [TokenType.Semicolon, 0, 1],
         ],
     },
-    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/css-tokenizer/url-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/url-token.test.ts
index 8fc8a98a3..e4800fb90 100644
--- a/packages/css-tokenizer/test/css-tokenizer/url-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/url-token.test.ts
@@ -2,7 +2,7 @@ import { CodePoint } from '../../src/common/enums/code-points';
 import { TokenType } from '../../src/common/enums/token-types';
 import { tokenize } from '../../src/css-tokenizer';
 import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('URLs', () => {
     describe('should tokenize valid inputs as ', () => {
@@ -126,11 +126,7 @@
                 [TokenType.CloseParenthesis, 24, 25],
             ],
         },
-        ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
-            const tokens: TokenData[] = [];
-            tokenize(actual, (...args) => tokens.push(args));
-            expect(tokens).toEqual(expected);
-        });
+        ]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
     });

     describe('should tokenize invalid inputs as ', () => {
diff --git a/packages/css-tokenizer/test/css-tokenizer/whitespace-token.test.ts b/packages/css-tokenizer/test/css-tokenizer/whitespace-token.test.ts
index 97bb6d510..39b8b12f4 100644
--- a/packages/css-tokenizer/test/css-tokenizer/whitespace-token.test.ts
+++ b/packages/css-tokenizer/test/css-tokenizer/whitespace-token.test.ts
@@ -1,7 +1,5 @@
 import { TokenType } from '../../src/common/enums/token-types';
-import { tokenize } from '../../src/css-tokenizer';
-import type { TokenData } from '../helpers/test-interfaces';
-import { addAsProp } from '../helpers/test-utils';
+import { addAsProp, testTokenization } from '../helpers/test-utils';

 describe('whitespace', () => {
     test.each(addAsProp([
@@ -54,9 +52,5 @@ describe('whitespace', () => {
             [TokenType.Whitespace, 0, 2],
         ],
     },
-    ]))('should tokenize \'$name\' as \'as\'', ({ actual, expected }) => {
-        const tokens: TokenData[] = [];
-        tokenize(actual, (...args) => tokens.push(args));
-        expect(tokens).toEqual(expected);
-    });
+    ]))('should tokenize \'$name\' as \'as\'', testTokenization);
 });
diff --git a/packages/css-tokenizer/test/helpers/test-utils.ts b/packages/css-tokenizer/test/helpers/test-utils.ts
index dcb8f5dd4..2fcba1081 100644
--- a/packages/css-tokenizer/test/helpers/test-utils.ts
+++ b/packages/css-tokenizer/test/helpers/test-utils.ts
@@ -1,6 +1,8 @@
 import { getFormattedTokenName } from '../../src/utils/token-names';
 import { type TokenType } from '../../src/common/enums/token-types';
-import { type TokenTest } from './test-interfaces';
+import { type TokenData, type TokenTest } from './test-interfaces';
+import { tokenize } from '../../src/css-tokenizer';
+import { type tokenizeExtended } from '../../src/extended-css-tokenizer';

 const SEPARATOR = ', ';

@@ -29,3 +31,15 @@
     return tests;
 };
+
+/**
+ * Helper function to test tokenization, it is enough in most cases.
+ *
+ * @param test Token test
+ * @param fn Tokenizer function
+ */
+export const testTokenization = (test: TokenTest, fn: typeof tokenize | typeof tokenizeExtended = tokenize): void => {
+    const tokens: TokenData[] = [];
+    fn(test.actual, (...args) => tokens.push(args));
+    expect(tokens).toEqual(test.expected);
+};
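Note on reuse: `testTokenization` takes the tokenizer as an optional second argument that defaults to `tokenize`, which is what lets the refactored suites above pass the helper straight to `test.each`. A minimal sketch of how a suite could opt into `tokenizeExtended` instead follows; the suite name, sample input, and wrapper callback are illustrative assumptions, not part of this diff.

```ts
// Sketch only: reusing testTokenization with the extended tokenizer.
// Assumes this file sits next to the other suites in test/css-tokenizer/;
// only the helper signature and module paths come from the change above.
import { TokenType } from '../../src/common/enums/token-types';
import { tokenizeExtended } from '../../src/extended-css-tokenizer';
import type { TokenTest } from '../helpers/test-interfaces';
import { testTokenization } from '../helpers/test-utils';

describe('extended tokenizer reuse (illustrative)', () => {
    test.each([
        {
            actual: ';',
            expected: [
                [TokenType.Semicolon, 0, 1],
            ],
        },
    ] as TokenTest[])(
        'should tokenize \'$actual\'',
        // Pass tokenizeExtended explicitly; without it the helper falls back to tokenize.
        (testCase) => testTokenization(testCase, tokenizeExtended),
    );
});
```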