diff --git a/lib/selector/parser/ast.go b/lib/selector/parser/ast.go
index 58f8b6b7a..1e493e614 100644
--- a/lib/selector/parser/ast.go
+++ b/lib/selector/parser/ast.go
@@ -15,31 +15,38 @@ package parser
 import (
-	_ "crypto/sha256"
+	_ "crypto/sha256" // register hash function
 	"strings"
 
 	"github.com/projectcalico/libcalico-go/lib/hash"
 )
 
+// Labels defines the interface for labels that can be queried by a selector.
 type Labels interface {
+	// Get returns the value and presence of the given labelName.
	Get(labelName string) (value string, present bool)
 }
 
+// MapAsLabels allows a plain map[string]string to be used as Labels.
 type MapAsLabels map[string]string
 
+// Get returns the value and presence of the given labelName key in the map.
 func (l MapAsLabels) Get(labelName string) (value string, present bool) {
 	value, present = l[labelName]
 	return
 }
 
+// Selector represents a label selector.
 type Selector interface {
 	// Evaluate evaluates the selector against the given labels expressed as a concrete map.
 	Evaluate(labels map[string]string) bool
 
 	// EvaluateLabels evaluates the selector against the given labels expressed as an interface.
 	// This allows for labels that are calculated on the fly.
 	EvaluateLabels(labels Labels) bool
 
+	// String returns a string that represents this selector.
 	String() string
-	UniqueId() string
+	// UniqueID returns the unique ID that represents this selector.
+	UniqueID() string
 }
 
 type selectorRoot struct {
@@ -65,7 +72,7 @@ func (sel selectorRoot) String() string {
 	return *sel.cachedString
 }
 
-func (sel selectorRoot) UniqueId() string {
+func (sel selectorRoot) UniqueID() string {
 	if sel.cachedHash == nil {
 		hash := hash.MakeUniqueID("s", sel.String())
 		sel.cachedHash = &hash
@@ -86,11 +93,11 @@ type LabelEqValueNode struct {
 }
 
 func (node LabelEqValueNode) Evaluate(labels Labels) bool {
-	if val, ok := labels.Get(node.LabelName); ok {
+	val, ok := labels.Get(node.LabelName)
+	if ok {
 		return val == node.Value
-	} else {
-		return false
 	}
+	return false
 }
 
 func (node LabelEqValueNode) collectFragments(fragments []string) []string {
@@ -109,11 +116,11 @@ type LabelInSetNode struct {
 }
 
 func (node LabelInSetNode) Evaluate(labels Labels) bool {
-	if val, ok := labels.Get(node.LabelName); ok {
+	val, ok := labels.Get(node.LabelName)
+	if ok {
 		return node.Value.Contains(val)
-	} else {
-		return false
 	}
+	return false
 }
 
 func (node LabelInSetNode) collectFragments(fragments []string) []string {
@@ -126,11 +133,11 @@ type LabelNotInSetNode struct {
 }
 
 func (node LabelNotInSetNode) Evaluate(labels Labels) bool {
-	if val, ok := labels.Get(node.LabelName); ok {
+	val, ok := labels.Get(node.LabelName)
+	if ok {
 		return !node.Value.Contains(val)
-	} else {
-		return true
 	}
+	return true
 }
 
 func (node LabelNotInSetNode) collectFragments(fragments []string) []string {
@@ -166,11 +173,11 @@ type LabelNeValueNode struct {
 }
 
 func (node LabelNeValueNode) Evaluate(labels Labels) bool {
-	if val, ok := labels.Get(node.LabelName); ok {
+	val, ok := labels.Get(node.LabelName)
+	if ok {
 		return val != node.Value
-	} else {
-		return true
 	}
+	return true
 }
 
 func (node LabelNeValueNode) collectFragments(fragments []string) []string {
@@ -188,11 +195,11 @@ type HasNode struct {
 }
 
 func (node HasNode) Evaluate(labels Labels) bool {
-	if _, ok := labels.Get(node.LabelName); ok {
+	_, ok := labels.Get(node.LabelName)
+	if ok {
 		return true
-	} else {
-		return false
 	}
+	return false
 }
 
 func (node HasNode) collectFragments(fragments []string) []string {
diff --git a/lib/selector/parser/parser.go b/lib/selector/parser/parser.go
index 5e356b3cd..2ee00c93e 100644
--- a/lib/selector/parser/parser.go
+++ b/lib/selector/parser/parser.go
@@ -24,14 +24,14 @@ import (
 
 const parserDebug = false
 
-// Parse a string representation of a selector expression into a Selector.
+// Parse parses a string representation of a selector expression into a Selector.
 func Parse(selector string) (sel Selector, err error) {
 	log.Debugf("Parsing %#v", selector)
 	tokens, err := tokenizer.Tokenize(selector)
 	if err != nil {
 		return
 	}
-	if tokens[0].Kind == tokenizer.TokEof {
+	if tokens[0].Kind == tokenizer.TokEOF {
 		return selectorRoot{root: AllNode{}}, nil
 	}
 	log.Debugf("Tokens %v", tokens)
diff --git a/lib/selector/parser/parser_test.go b/lib/selector/parser/parser_test.go
index cf1bb3ae7..7111c5835 100644
--- a/lib/selector/parser/parser_test.go
+++ b/lib/selector/parser/parser_test.go
@@ -221,7 +221,7 @@ var _ = Describe("Parser", func() {
 			}
 		})
 		It("should give same UID on each call", func() {
-			Expect(sel.UniqueId()).To(Equal(sel.UniqueId()))
+			Expect(sel.UniqueID()).To(Equal(sel.UniqueID()))
 		})
 	})
 }
@@ -245,8 +245,8 @@ var _ = Describe("Parser", func() {
 			roundTripped, err := parser.Parse(canon)
 			Expect(err).To(BeNil())
 			Expect(roundTripped.String()).To(Equal(canon))
-			uid := sel.UniqueId()
-			Expect(roundTripped.UniqueId()).To(Equal(uid))
+			uid := sel.UniqueID()
+			Expect(roundTripped.UniqueID()).To(Equal(uid))
 		})
 	}
@@ -258,9 +258,9 @@ var _ = Describe("Parser", func() {
 		It(fmt.Sprintf("should calculate the correct UID for %s", test.input), func() {
 			sel, err := parser.Parse(test.input)
 			Expect(err).To(BeNil())
-			Expect(sel.UniqueId()).To(Equal(test.expectedUid),
+			Expect(sel.UniqueID()).To(Equal(test.expectedUid),
 				"incorrect UID for "+test.input)
-			Expect(sel.UniqueId()).To(Equal(sel.UniqueId()),
+			Expect(sel.UniqueID()).To(Equal(sel.UniqueID()),
 				"inconsistent UID for "+test.input)
 		})
 	}
diff --git a/lib/selector/parser/stringset.go b/lib/selector/parser/stringset.go
index 8238ac811..11e208b01 100644
--- a/lib/selector/parser/stringset.go
+++ b/lib/selector/parser/stringset.go
@@ -18,6 +18,7 @@ import "sort"
 
 type StringSet []string
 
+// Contains returns true if the given string is in the set.
 func (ss StringSet) Contains(s string) bool {
 	if len(ss) == 0 {
 		// Empty set or nil.
@@ -61,7 +62,7 @@ func ConvertToStringSetInPlace(s []string) StringSet {
 			continue
 		}
 		s[j] = v
-		j += 1
+		j++
 		last = v
 	}
 	s = s[:j]
diff --git a/lib/selector/selector.go b/lib/selector/selector.go
index 165a99bfc..a66a47b3b 100644
--- a/lib/selector/selector.go
+++ b/lib/selector/selector.go
@@ -16,6 +16,7 @@ package selector
 
 import "github.com/projectcalico/libcalico-go/lib/selector/parser"
 
+// Selector represents a label selector.
 type Selector interface {
 	Evaluate(labels map[string]string) bool
 	EvaluateLabels(labels parser.Labels) bool
diff --git a/lib/selector/tokenizer/tokenizer.go b/lib/selector/tokenizer/tokenizer.go
index e1e9bd8e9..a8a65cd16 100644
--- a/lib/selector/tokenizer/tokenizer.go
+++ b/lib/selector/tokenizer/tokenizer.go
@@ -41,13 +41,14 @@ const (
 	TokRParen
 	TokAnd
 	TokOr
-	TokEof
+	TokEOF
 )
 
 const tokenizerDebug = false
 
 var whitespace = " \t"
 
+// Token has a kind and an optional value.
 type Token struct {
 	Kind  tokenKind
 	Value interface{}
@@ -70,6 +71,7 @@ var (
 	inRegex = regexp.MustCompile("^" + inExpr)
 )
 
+// Tokenize converts the input string into a slice of tokens.
 func Tokenize(input string) (tokens []Token, err error) {
 	for {
 		if tokenizerDebug {
@@ -78,7 +80,7 @@ func Tokenize(input string) (tokens []Token, err error) {
 		startLen := len(input)
 		input = strings.TrimLeft(input, whitespace)
 		if len(input) == 0 {
-			tokens = append(tokens, Token{TokEof, nil})
+			tokens = append(tokens, Token{TokEOF, nil})
 			return
 		}
 		switch input[0] {
diff --git a/lib/selector/tokenizer/tokenizer_test.go b/lib/selector/tokenizer/tokenizer_test.go
index d645b4394..86aae500e 100644
--- a/lib/selector/tokenizer/tokenizer_test.go
+++ b/lib/selector/tokenizer/tokenizer_test.go
@@ -31,20 +31,20 @@ var tokenTests = []struct {
 		{tokenizer.TokLabel, "a"},
 		{tokenizer.TokEq, nil},
 		{tokenizer.TokStringLiteral, "b"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a=="b"`, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
 		{tokenizer.TokEq, nil},
 		{tokenizer.TokStringLiteral, "b"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`label == "value"`, []tokenizer.Token{
 		{tokenizer.TokLabel, "label"},
 		{tokenizer.TokEq, nil},
 		{tokenizer.TokStringLiteral, "value"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a not in "bar" && !has(foo) || b in c`, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
@@ -57,19 +57,19 @@ var tokenTests = []struct {
 		{tokenizer.TokLabel, "b"},
 		{tokenizer.TokIn, nil},
 		{tokenizer.TokLabel, "c"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`has(calico/k8s_ns)`, []tokenizer.Token{
 		{tokenizer.TokHas, "calico/k8s_ns"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`has(calico/k8s_ns/role)`, []tokenizer.Token{
 		{tokenizer.TokHas, "calico/k8s_ns/role"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`has(calico/k8s_NS-.1/role)`, []tokenizer.Token{
 		{tokenizer.TokHas, "calico/k8s_NS-.1/role"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`calico/k8s_ns == "kube-system" && k8s-app == "kube-dns"`, []tokenizer.Token{
 		{tokenizer.TokLabel, "calico/k8s_ns"},
@@ -79,7 +79,7 @@ var tokenTests = []struct {
 		{tokenizer.TokLabel, "k8s-app"},
 		{tokenizer.TokEq, nil},
 		{tokenizer.TokStringLiteral, "kube-dns"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a not in "bar" && ! has( foo ) || b in c `, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
@@ -92,7 +92,7 @@ var tokenTests = []struct {
 		{tokenizer.TokLabel, "b"},
 		{tokenizer.TokIn, nil},
 		{tokenizer.TokLabel, "c"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a notin"bar"&&!has(foo)||b in"c"`, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
@@ -105,14 +105,14 @@ var tokenTests = []struct {
 		{tokenizer.TokLabel, "b"},
 		{tokenizer.TokIn, nil},
 		{tokenizer.TokStringLiteral, "c"},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a not in {}`, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
 		{tokenizer.TokNotIn, nil},
 		{tokenizer.TokLBrace, nil},
 		{tokenizer.TokRBrace, nil},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a not in {"a"}`, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
@@ -120,7 +120,7 @@ var tokenTests = []struct {
 		{tokenizer.TokLBrace, nil},
 		{tokenizer.TokStringLiteral, "a"},
 		{tokenizer.TokRBrace, nil},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 	{`a not in {"a","B"}`, []tokenizer.Token{
 		{tokenizer.TokLabel, "a"},
@@ -130,7 +130,7 @@ var tokenTests = []struct {
 		{tokenizer.TokLBrace, nil},
 		{tokenizer.TokStringLiteral, "a"},
 		{tokenizer.TokComma, nil},
 		{tokenizer.TokStringLiteral, "B"},
 		{tokenizer.TokRBrace, nil},
-		{tokenizer.TokEof, nil},
+		{tokenizer.TokEOF, nil},
 	}},
 }
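For orientation, the renamed API fits together as in the following minimal sketch. This example is not part of the change: Parse, Evaluate, EvaluateLabels, MapAsLabels, String and UniqueID come from the files touched above, the selector string reuses the `calico/k8s_ns == "kube-system" && k8s-app == "kube-dns"` case from the tokenizer tests, and the label map is invented for illustration.

	// Hypothetical usage sketch, not part of this diff.
	package main

	import (
		"fmt"

		"github.com/projectcalico/libcalico-go/lib/selector/parser"
	)

	func main() {
		// Parse a selector expression; the string is one of the tokenizer test cases.
		sel, err := parser.Parse(`calico/k8s_ns == "kube-system" && k8s-app == "kube-dns"`)
		if err != nil {
			panic(err)
		}

		// Invented label data for illustration only.
		labels := map[string]string{
			"calico/k8s_ns": "kube-system",
			"k8s-app":       "kube-dns",
		}

		// Evaluate against a concrete map...
		fmt.Println(sel.Evaluate(labels)) // true
		// ...or against anything implementing the Labels interface; MapAsLabels adapts a
		// plain map, and other implementations can compute labels on the fly.
		fmt.Println(sel.EvaluateLabels(parser.MapAsLabels(labels))) // true

		fmt.Println(sel.String())   // canonical string form of the selector
		fmt.Println(sel.UniqueID()) // stable hash-based ID (previously UniqueId)
	}

The UniqueID value is derived from the canonical String() form via hash.MakeUniqueID, which is why the round-trip tests above expect the same ID after re-parsing the canonical form.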