Skip to content

Commit

Permalink
Reuse main test logic
Browse files Browse the repository at this point in the history
  • Loading branch information
scripthunter7 committed Oct 17, 2023
1 parent 6a775df commit 5722aae
Show file tree
Hide file tree
Showing 11 changed files with 44 additions and 119 deletions.
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('at-keyword-token', () => {
test.each(addAsProp([
Expand All @@ -20,9 +18,5 @@ describe('at-keyword-token', () => {
[TokenType.Semicolon, 16, 17],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import { type TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('cdo-token and cdc-token', () => {
test.each(addAsProp([
Expand All @@ -17,9 +15,5 @@ describe('cdo-token and cdc-token', () => {
[TokenType.Cdc, 0, 3],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});
10 changes: 2 additions & 8 deletions packages/css-tokenizer/test/css-tokenizer/comment-token.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('comment-token', () => {
test.each(addAsProp([
Expand Down Expand Up @@ -46,9 +44,5 @@ describe('comment-token', () => {
[TokenType.Comment, 32, 44],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});
59 changes: 11 additions & 48 deletions packages/css-tokenizer/test/css-tokenizer/delim-token.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import type { TokenTest } from '../helpers/test-interfaces';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('delim-token', () => {
// Tokenize any unknown character as <delim-token>
Expand All @@ -19,11 +18,7 @@ describe('delim-token', () => {
[TokenType.Delim, 0, 1],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});

describe("should tokenize '#' as <delim-token> if it isn't followed by a name or a hex digit", () => {
Expand All @@ -49,11 +44,7 @@ describe('delim-token', () => {
[TokenType.Number, 2, 3],
],
},
])("should tokenize '#' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '#' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '+' as <delim-token> if it isn't a part of a number", () => {
Expand All @@ -79,11 +70,7 @@ describe('delim-token', () => {
[TokenType.Number, 2, 3],
],
},
])("should tokenize '+' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '+' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '-' as <delim-token> if it isn't a part of a number, CDC or ident", () => {
Expand Down Expand Up @@ -117,11 +104,7 @@ describe('delim-token', () => {
[TokenType.Ident, 2, 3],
],
},
])("should tokenize '-' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '-' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '.' as <delim-token> if it isn't a part of a number", () => {
Expand All @@ -147,11 +130,7 @@ describe('delim-token', () => {
[TokenType.Number, 2, 3],
],
},
])("should tokenize '.' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '.' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '<' as <delim-token> if it isn't a part of a CDO", () => {
Expand All @@ -178,11 +157,7 @@ describe('delim-token', () => {
[TokenType.Ident, 3, 5],
],
},
])("should tokenize '<' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '<' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '@' as <delim-token> if it isn't a part of an at-keyword", () => {
Expand All @@ -208,11 +183,7 @@ describe('delim-token', () => {
[TokenType.Ident, 2, 9],
],
},
])("should tokenize '@' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '@' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '/' as <delim-token> if it isn't a part of a comment mark", () => {
Expand All @@ -238,11 +209,7 @@ describe('delim-token', () => {
[TokenType.Delim, 2, 3],
],
},
])("should tokenize '/' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '/' as <delim-token> in '$actual'", testTokenization);
});

describe("should tokenize '\\' as <delim-token> if it isn't a valid escape", () => {
Expand All @@ -254,10 +221,6 @@ describe('delim-token', () => {
[TokenType.Whitespace, 1, 2],
],
},
])("should tokenize '\\' as <delim-token> in '$actual'", ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
] as TokenTest[])("should tokenize '\\' as <delim-token> in '$actual'", testTokenization);
});
});
10 changes: 2 additions & 8 deletions packages/css-tokenizer/test/css-tokenizer/hash-token.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('hash-token', () => {
test.each(addAsProp([
Expand All @@ -17,9 +15,5 @@ describe('hash-token', () => {
[TokenType.Hash, 0, 10],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});
10 changes: 2 additions & 8 deletions packages/css-tokenizer/test/css-tokenizer/numeric-token.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('numeric-token', () => {
// Test data from https://developer.mozilla.org/en-US/docs/Web/CSS/number
Expand Down Expand Up @@ -63,9 +61,5 @@ describe('numeric-token', () => {
[TokenType.Dimension, 0, value.length + 3],
],
}))),
])('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
])('should tokenize \'$actual\' as \'$as\'', testTokenization);
});
10 changes: 2 additions & 8 deletions packages/css-tokenizer/test/css-tokenizer/string-token.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('string-token and bad-string-token', () => {
test.each(addAsProp([
Expand Down Expand Up @@ -111,9 +109,5 @@ describe('string-token and bad-string-token', () => {
[TokenType.String, 0, 10],
],
},
]))('should tokenize $actual as $as', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize $actual as $as', testTokenization);
});
10 changes: 2 additions & 8 deletions packages/css-tokenizer/test/css-tokenizer/trivial-tokens.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('trivial tokens', () => {
test.each(addAsProp([
Expand Down Expand Up @@ -59,9 +57,5 @@ describe('trivial tokens', () => {
[TokenType.Semicolon, 0, 1],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});
8 changes: 2 additions & 6 deletions packages/css-tokenizer/test/css-tokenizer/url-token.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { CodePoint } from '../../src/common/enums/code-points';
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('URLs', () => {
describe('should tokenize valid inputs as <url-token>', () => {
Expand Down Expand Up @@ -126,11 +126,7 @@ describe('URLs', () => {
[TokenType.CloseParenthesis, 24, 25],
],
},
]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$actual\' as \'$as\'', testTokenization);
});

describe('should tokenize invalid inputs as <bad-url-token>', () => {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';
import { addAsProp, testTokenization } from '../helpers/test-utils';

describe('whitespace', () => {
test.each(addAsProp([
Expand Down Expand Up @@ -54,9 +52,5 @@ describe('whitespace', () => {
[TokenType.Whitespace, 0, 2],
],
},
]))('should tokenize \'$name\' as \'as\'', ({ actual, expected }) => {
const tokens: TokenData[] = [];
tokenize(actual, (...args) => tokens.push(args));
expect(tokens).toEqual(expected);
});
]))('should tokenize \'$name\' as \'as\'', testTokenization);
});
16 changes: 15 additions & 1 deletion packages/css-tokenizer/test/helpers/test-utils.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import { getFormattedTokenName } from '../../src/utils/token-names';
import { type TokenType } from '../../src/common/enums/token-types';
import { type TokenTest } from './test-interfaces';
import { type TokenData, type TokenTest } from './test-interfaces';
import { tokenize } from '../../src/css-tokenizer';
import { type tokenizeExtended } from '../../src/extended-css-tokenizer';

const SEPARATOR = ', ';

Expand Down Expand Up @@ -29,3 +31,15 @@ export const addAsProp = (tests: TokenTest[]): TokenTest[] => {

return tests;
};

/**
 * Shared assertion helper: tokenizes the test case's input and verifies
 * that the tokens reported via the callback match the expected list.
 *
 * @param test Token test case (`actual` input string and `expected` token data).
 * @param fn Tokenizer to exercise; defaults to the plain CSS `tokenize`.
 */
export const testTokenization = (test: TokenTest, fn: typeof tokenize | typeof tokenizeExtended = tokenize): void => {
    // Every invocation of the callback contributes one token record.
    const collected: TokenData[] = [];
    fn(test.actual, (...tokenData) => collected.push(tokenData));
    expect(collected).toEqual(test.expected);
};

0 comments on commit 5722aae

Please sign in to comment.