From daa7cdf71185e4ca91aaa1a16556059cd8bd87fb Mon Sep 17 00:00:00 2001
From: George Robinson
Date: Sat, 19 Aug 2023 09:55:59 +0100
Subject: [PATCH] Rename Ident to Unquoted

---
 matchers/parse/lexer.go      |  2 +-
 matchers/parse/lexer_test.go | 44 ++++++++++++++++++------------------
 matchers/parse/parse.go      | 12 +++++-----
 matchers/parse/token.go      |  6 ++---
 4 files changed, 32 insertions(+), 32 deletions(-)

diff --git a/matchers/parse/lexer.go b/matchers/parse/lexer.go
index cdf25161e3..559a0192d4 100644
--- a/matchers/parse/lexer.go
+++ b/matchers/parse/lexer.go
@@ -196,7 +196,7 @@ func (l *Lexer) scanIdent() (Token, error) {
             break
         }
     }
-    return l.emit(TokenIdent), nil
+    return l.emit(TokenUnquoted), nil
 }
 
 func (l *Lexer) scanOperator() (Token, error) {
diff --git a/matchers/parse/lexer_test.go b/matchers/parse/lexer_test.go
index 096adc98eb..989c7d6b80 100644
--- a/matchers/parse/lexer_test.go
+++ b/matchers/parse/lexer_test.go
@@ -122,10 +122,10 @@ func TestLexer_Scan(t *testing.T) {
             },
         }},
     }, {
-        name:  "ident",
+        name:  "unquoted",
         input: "hello",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello",
             Position: Position{
                 OffsetStart: 0,
@@ -135,10 +135,10 @@ func TestLexer_Scan(t *testing.T) {
             },
         }},
     }, {
-        name:  "ident with underscore",
+        name:  "unquoted with underscore",
         input: "hello_world",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello_world",
             Position: Position{
                 OffsetStart: 0,
@@ -148,10 +148,10 @@ func TestLexer_Scan(t *testing.T) {
             },
         }},
     }, {
-        name:  "ident with colon",
+        name:  "unquoted with colon",
         input: "hello:world",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello:world",
             Position: Position{
                 OffsetStart: 0,
@@ -161,10 +161,10 @@ func TestLexer_Scan(t *testing.T) {
             },
         }},
     }, {
-        name:  "ident with numbers",
+        name:  "unquoted with numbers",
         input: "hello0123456789",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello0123456789",
             Position: Position{
                 OffsetStart: 0,
@@ -174,10 +174,10 @@ func TestLexer_Scan(t *testing.T) {
             },
         }},
     }, {
-        name:  "ident can start with underscore",
+        name:  "unquoted can start with underscore",
         input: "_hello",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "_hello",
             Position: Position{
                 OffsetStart: 0,
@@ -187,10 +187,10 @@ func TestLexer_Scan(t *testing.T) {
             },
         }},
     }, {
-        name:  "idents separated with space",
+        name:  "unquoted separated with space",
         input: "hello world",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello",
             Position: Position{
                 OffsetStart: 0,
@@ -199,7 +199,7 @@ func TestLexer_Scan(t *testing.T) {
                 ColumnEnd:   5,
             },
         }, {
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "world",
             Position: Position{
                 OffsetStart: 6,
@@ -364,14 +364,14 @@ func TestLexer_Scan(t *testing.T) {
         input: "Σ",
         err:   "0:1: Σ: invalid input",
     }, {
-        name:  "unexpected : at start of ident",
+        name:  "unexpected : at start of unquoted",
         input: ":hello",
         err:   "0:1: :: invalid input",
     }, {
-        name:  "unexpected $ in ident",
+        name:  "unexpected $ in unquoted",
         input: "hello$",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello",
             Position: Position{
                 OffsetStart: 0,
@@ -382,10 +382,10 @@ func TestLexer_Scan(t *testing.T) {
         }},
         err: "5:6: $: invalid input",
     }, {
-        name:  "unexpected unicode letter in ident",
+        name:  "unexpected unicode letter in unquoted",
         input: "helloΣ",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello",
             Position: Position{
                 OffsetStart: 0,
@@ -396,10 +396,10 @@ func TestLexer_Scan(t *testing.T) {
         }},
         err: "5:6: Σ: invalid input",
     }, {
-        name:  "unexpected emoji in ident",
+        name:  "unexpected emoji in unquoted",
         input: "hello🙂",
         expected: []Token{{
-            Kind:  TokenIdent,
+            Kind:  TokenUnquoted,
             Value: "hello",
             Position: Position{
                 OffsetStart: 0,
@@ -507,7 +507,7 @@ func TestLexer_ScanError(t *testing.T) {
 func TestLexer_Peek(t *testing.T) {
     l := NewLexer("hello world")
     expected1 := Token{
-        Kind:  TokenIdent,
+        Kind:  TokenUnquoted,
         Value: "hello",
         Position: Position{
             OffsetStart: 0,
@@ -517,7 +517,7 @@ func TestLexer_Peek(t *testing.T) {
         },
     }
     expected2 := Token{
-        Kind:  TokenIdent,
+        Kind:  TokenUnquoted,
         Value: "world",
         Position: Position{
             OffsetStart: 6,
diff --git a/matchers/parse/parse.go b/matchers/parse/parse.go
index 7871cbf24a..ced9097359 100644
--- a/matchers/parse/parse.go
+++ b/matchers/parse/parse.go
@@ -186,7 +186,7 @@ func (p *Parser) parseComma(l *Lexer) (parseFn, error) {
     }
     // The token after the comma can be another matcher, a close brace or the
     // end of input.
-    tok, err := p.expect(l.Peek, TokenCloseBrace, TokenIdent, TokenQuoted)
+    tok, err := p.expect(l.Peek, TokenCloseBrace, TokenUnquoted, TokenQuoted)
     if err != nil {
         if errors.Is(err, ErrEOF) {
             // If this is the end of input we still need to check if the optional
@@ -217,10 +217,10 @@ func (p *Parser) parseLabelMatcher(l *Lexer) (parseFn, error) {
         ty         labels.MatchType
     )
 
-    // The next token is the label name. This can either be an ident which
+    // The next token is the label name. This can either be an unquoted which
     // accepts just [a-zA-Z_] or a quoted which accepts all UTF-8 characters
     // in double quotes.
-    if tok, err = p.expect(l.Scan, TokenIdent, TokenQuoted); err != nil {
+    if tok, err = p.expect(l.Scan, TokenUnquoted, TokenQuoted); err != nil {
         return nil, fmt.Errorf("%s: %w", err, ErrNoLabelName)
     }
     labelName = tok.Value
@@ -233,13 +233,13 @@ func (p *Parser) parseLabelMatcher(l *Lexer) (parseFn, error) {
         panic("Unexpected operator")
     }
 
-    // The next token is the label value. This too can either be an ident
+    // The next token is the label value. This too can either be an unquoted
     // which accepts just [a-zA-Z_] or a quoted which accepts all UTF-8
     // characters in double quotes.
-    if tok, err = p.expect(l.Scan, TokenIdent, TokenQuoted); err != nil {
+    if tok, err = p.expect(l.Scan, TokenUnquoted, TokenQuoted); err != nil {
         return nil, fmt.Errorf("%s: %s", err, ErrNoLabelValue)
     }
-    if tok.Kind == TokenIdent {
+    if tok.Kind == TokenUnquoted {
         labelValue = tok.Value
     } else {
         labelValue, err = strconv.Unquote(tok.Value)
diff --git a/matchers/parse/token.go b/matchers/parse/token.go
index ddc5048015..fb532c3083 100644
--- a/matchers/parse/token.go
+++ b/matchers/parse/token.go
@@ -23,10 +23,10 @@ const (
     TokenNone TokenKind = iota
     TokenCloseBrace
     TokenComma
-    TokenIdent
     TokenOpenBrace
     TokenOperator
     TokenQuoted
+    TokenUnquoted
 )
 
 func (k TokenKind) String() string {
@@ -35,14 +35,14 @@ func (k TokenKind) String() string {
         return "CloseBrace"
     case TokenComma:
         return "Comma"
-    case TokenIdent:
-        return "Ident"
     case TokenOpenBrace:
         return "OpenBrace"
     case TokenOperator:
         return "Op"
     case TokenQuoted:
         return "Quoted"
+    case TokenUnquoted:
+        return "Unquoted"
     default:
         return "None"
     }
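
A minimal usage sketch of the renamed kind, in the same parse package as the files above. It reuses the "hello world" input from TestLexer_Peek and assumes Scan has the signature Scan() (Token, error) and returns the zero Token (kind TokenNone) once the input is exhausted; neither of those details is shown in this diff, so treat them as assumptions rather than the package's documented contract.

package parse

import "fmt"

// ExampleLexer_unquoted scans the same "hello world" input used in
// TestLexer_Peek and prints each token. After this change the two bare
// words are reported as TokenUnquoted (whose String() is "Unquoted")
// rather than the old TokenIdent.
func ExampleLexer_unquoted() {
    l := NewLexer("hello world")
    for {
        tok, err := l.Scan()
        if err != nil {
            fmt.Println("error:", err)
            return
        }
        if tok.Kind == TokenNone {
            // Assumed end-of-input sentinel: Scan is taken to return the
            // zero Token when there is nothing left to scan.
            break
        }
        fmt.Println(tok.Kind, tok.Value)
    }
    // Output:
    // Unquoted hello
    // Unquoted world
}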