Tests for CSS tokenizer

scripthunter7 committed Oct 13, 2023
1 parent 16645a8 commit 6a775df

Showing 23 changed files with 1,294 additions and 16 deletions.
28 changes: 28 additions & 0 deletions packages/css-tokenizer/test/css-tokenizer/at-keyword-token.test.ts
@@ -0,0 +1,28 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';

describe('at-keyword-token', () => {
    test.each(addAsProp([
        {
            actual: '@import',
            expected: [
                [TokenType.AtKeyword, 0, 7],
            ],
        },
        {
            actual: '@charset "utf-8";',
            expected: [
                [TokenType.AtKeyword, 0, 8],
                [TokenType.Whitespace, 8, 9],
                [TokenType.String, 9, 16],
                [TokenType.Semicolon, 16, 17],
            ],
        },
    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
        const tokens: TokenData[] = [];
        tokenize(actual, (...args) => tokens.push(args));
        expect(tokens).toEqual(expected);
    });
});
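
For context, every suite in this commit leans on two small helpers from test/helpers that are not shown in this excerpt. Below is a minimal sketch of what they plausibly look like, inferred purely from their usage above — the names and shapes are assumptions, not the committed source.

// Hypothetical reconstruction of the test helpers — inferred from usage.
import { TokenType } from '../../src/common/enums/token-types';

// Each token reported by the tokenizer callback is captured as a tuple;
// the expectations above compare against [type, start, end].
export type TokenData = [type: TokenType, start: number, end: number];

interface TestCase {
    actual: string;
    expected: TokenData[];
}

// Adds a readable `as` property naming the expected token types, so that
// test.each can interpolate it into titles via the '$as' placeholder.
// Assumes TokenType is a numeric enum (reverse mapping via TokenType[type]).
export const addAsProp = (cases: TestCase[]): (TestCase & { as: string })[] => (
    cases.map((testCase) => ({
        ...testCase,
        as: testCase.expected.map(([type]) => TokenType[type]).join(', '),
    }))
);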
25 changes: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';

describe('cdo-token and cdc-token', () => {
    test.each(addAsProp([
        {
            actual: '<!--',
            expected: [
                [TokenType.Cdo, 0, 4],
            ],
        },
        {
            actual: '-->',
            expected: [
                [TokenType.Cdc, 0, 3],
            ],
        },
    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
        const tokens: TokenData[] = [];
        tokenize(actual, (...args) => tokens.push(args));
        expect(tokens).toEqual(expected);
    });
});
54 changes: 54 additions & 0 deletions packages/css-tokenizer/test/css-tokenizer/comment-token.test.ts
@@ -0,0 +1,54 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';

describe('comment-token', () => {
    test.each(addAsProp([
        // single comment
        {
            actual: '/* comment */',
            expected: [
                [TokenType.Comment, 0, 13],
            ],
        },
        // multiple comments separated by whitespace
        {
            actual: '/* comment 1 */ /* comment 2 */',
            expected: [
                [TokenType.Comment, 0, 15],
                [TokenType.Whitespace, 15, 16],
                [TokenType.Comment, 16, 31],
            ],
        },
        // tokenizer should tolerate missing closing comment mark according to the spec
        {
            actual: '/* comment',
            expected: [
                [TokenType.Comment, 0, 10],
            ],
        },
        // extra space at the end
        {
            actual: '/* comment ',
            expected: [
                [TokenType.Comment, 0, 11],
            ],
        },
        // last comment's closing mark is missing
        {
            actual: '/* comment 1 */ /* comment 2 */ /* comment 3',
            expected: [
                [TokenType.Comment, 0, 15],
                [TokenType.Whitespace, 15, 16],
                [TokenType.Comment, 16, 31],
                [TokenType.Whitespace, 31, 32],
                [TokenType.Comment, 32, 44],
            ],
        },
    ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
        const tokens: TokenData[] = [];
        tokenize(actual, (...args) => tokens.push(args));
        expect(tokens).toEqual(expected);
    });
});
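
These suites all drive the tokenizer the same way: tokenize() pushes tokens through a callback instead of returning an array. A minimal standalone sketch of that calling convention follows — only the three leading callback arguments these tests rely on are shown; the real signature may accept more.

import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';

// Log (type, start, end) for each token in a simple rule.
// TokenType[type] assumes a numeric enum with reverse mappings.
tokenize('a { color: red }', (type, start, end) => {
    console.log(TokenType[type], start, end);
});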
263 changes: 263 additions & 0 deletions packages/css-tokenizer/test/css-tokenizer/delim-token.test.ts
@@ -0,0 +1,263 @@
import { TokenType } from '../../src/common/enums/token-types';
import { tokenize } from '../../src/css-tokenizer';
import type { TokenData } from '../helpers/test-interfaces';
import { addAsProp } from '../helpers/test-utils';

describe('delim-token', () => {
    // Tokenize any unknown character as <delim-token>
    describe('should tokenize any unknown character as <delim-token>', () => {
        test.each(addAsProp([
            {
                actual: '$',
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: '^',
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
        ]))('should tokenize \'$actual\' as \'$as\'', ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '#' as <delim-token> if it isn't followed by a name or a hex digit", () => {
        test.each([
            {
                actual: String.raw`#`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`# `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`# 0`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Number, 2, 3],
                ],
            },
        ])("should tokenize '#' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '+' as <delim-token> if it isn't a part of a number", () => {
        test.each([
            {
                actual: String.raw`+`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`+ `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`+ 1`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Number, 2, 3],
                ],
            },
        ])("should tokenize '+' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '-' as <delim-token> if it isn't a part of a number, CDC or ident", () => {
        test.each([
            {
                actual: String.raw`-`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`- `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`- 1`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Number, 2, 3],
                ],
            },
            {
                actual: String.raw`- a`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Ident, 2, 3],
                ],
            },
        ])("should tokenize '-' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '.' as <delim-token> if it isn't a part of a number", () => {
        test.each([
            {
                actual: String.raw`.`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`. `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`. 1`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Number, 2, 3],
                ],
            },
        ])("should tokenize '.' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '<' as <delim-token> if it isn't a part of a CDO", () => {
        test.each([
            {
                actual: String.raw`<`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`< `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`< !--`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Delim, 2, 3],
                    [TokenType.Ident, 3, 5],
                ],
            },
        ])("should tokenize '<' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '@' as <delim-token> if it isn't a part of an at-keyword", () => {
        test.each([
            {
                actual: String.raw`@`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`@ `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`@ charset`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Ident, 2, 9],
                ],
            },
        ])("should tokenize '@' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '/' as <delim-token> if it isn't a part of a comment mark", () => {
        test.each([
            {
                actual: String.raw`/`,
                expected: [
                    [TokenType.Delim, 0, 1],
                ],
            },
            {
                actual: String.raw`/ `,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
            {
                actual: String.raw`/ *`,
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                    [TokenType.Delim, 2, 3],
                ],
            },
        ])("should tokenize '/' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });

    describe("should tokenize '\\' as <delim-token> if it isn't a valid escape", () => {
        test.each([
            {
                actual: '\\\n',
                expected: [
                    [TokenType.Delim, 0, 1],
                    [TokenType.Whitespace, 1, 2],
                ],
            },
        ])("should tokenize '\\' as <delim-token> in '$actual'", ({ actual, expected }) => {
            const tokens: TokenData[] = [];
            tokenize(actual, (...args) => tokens.push(args));
            expect(tokens).toEqual(expected);
        });
    });
});
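
Each "if it isn't ..." fallback above has a positive counterpart in the CSS Syntax spec, where the same character opens a longer token instead. A hedged summary of those counterpart inputs — token names follow the spec's grammar, and the exact enum members (e.g. a Hash member for <hash-token>) are assumptions:

// Hypothetical counterpart cases — not part of this commit.
// '#fff'   -> <hash-token>:       '#' followed by name code points
// '+1'     -> <number-token>:     '+' starts a number
// '-a'     -> <ident-token>:      '-' starts an ident
// '.5'     -> <number-token>:     '.' followed by a digit
// '<!--'   -> <CDO-token>:        '<' starts a CDO
// '@media' -> <at-keyword-token>: '@' followed by an ident sequence
// '/* */'  -> <comment-token>:    '/' followed by '*'
// '\41'    -> a valid escape, consumed as part of an ident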
25 changes: 25 additions & 0 deletions packages/css-tokenizer/test/css-tokenizer/errors.test.ts
@@ -0,0 +1,25 @@
import { ErrorMessage } from '../../src/common/enums/error-messages';
import { tokenize } from '../../src/css-tokenizer';
import type { ErrorData } from '../helpers/test-interfaces';

describe('onError callback', () => {
    test.each([
        {
            actual: '\\\n',
            expected: [
                [ErrorMessage.InvalidEscapeSequence, 0, 1],
            ],
        },
        {
            actual: '/** unclosed comment',
            expected: [
                [ErrorMessage.UnterminatedComment, 0, 18],
            ],
        },
        // FIXME: Test all possible errors
    ])('should report error for \'$actual\'', ({ actual, expected }) => {
        const errors: ErrorData[] = [];
        tokenize(actual, () => {}, (...args) => errors.push(args));
        expect(errors).toEqual(expected);
    });
});
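
The error callback mirrors the token callback: a message plus source offsets. A plausible shape for ErrorData, inferred from the expectations above — the committed type in test/helpers may differ:

// Hypothetical reconstruction — inferred from usage.
import type { ErrorMessage } from '../../src/common/enums/error-messages';

// Each reported error is captured as [message, start, end].
export type ErrorData = [message: ErrorMessage, start: number, end: number];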