Merge pull request #4835 from sdminonne/issue_341_step_2

moving ParseSelector to Parse for labels only.
This commit is contained in:
Brian Grant 2015-03-05 14:29:22 -08:00
commit 2902028476
12 changed files with 189 additions and 151 deletions

View File

@ -31,7 +31,7 @@ kube::test::get_object_assert() {
local request=$2 local request=$2
local expected=$3 local expected=$3
res=$(kubectl get "${kube_flags[@]}" $object -o template -t "$request") res=$(eval kubectl get "${kube_flags[@]}" $object -o template -t "$request")
if [[ "$res" =~ ^$expected$ ]]; then if [[ "$res" =~ ^$expected$ ]]; then
echo -n ${green} echo -n ${green}

View File

@ -194,11 +194,11 @@ for version in "${kube_api_versions[@]}"; do
### Delete POD valid-pod with label ### Delete POD valid-pod with label
# Pre-condition: valid-pod POD is running # Pre-condition: valid-pod POD is running
kube::test::get_object_assert 'pods -l name=valid-pod' "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:' kube::test::get_object_assert "pods -l'name in (valid-pod)'" '{{range.items}}{{.$id_field}}:{{end}}' 'valid-pod:'
# Command # Command
kubectl delete pods -l name=valid-pod "${kube_flags[@]}" kubectl delete pods -l'name in (valid-pod)' "${kube_flags[@]}"
# Post-condition: no POD is running # Post-condition: no POD is running
kube::test::get_object_assert 'pods -l name=valid-pod' "{{range.items}}{{.$id_field}}:{{end}}" '' kube::test::get_object_assert "pods -l'name in (valid-pod)'" '{{range.items}}{{.$id_field}}:{{end}}' ''
### Create POD valid-pod from JSON ### Create POD valid-pod from JSON
# Pre-condition: no POD is running # Pre-condition: no POD is running
@ -220,7 +220,7 @@ for version in "${kube_api_versions[@]}"; do
# Pre-condition: valid-pod POD is running # Pre-condition: valid-pod POD is running
kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:' kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:'
# Command # Command
! kubectl delete --all pods -l name=valid-pod "${kube_flags[@]}" ! kubectl delete --all pods -l'name in (valid-pod)' "${kube_flags[@]}"
# Post-condition: valid-pod POD is running # Post-condition: valid-pod POD is running
kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:' kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:'
@ -230,7 +230,7 @@ for version in "${kube_api_versions[@]}"; do
# Command # Command
kubectl delete --all pods "${kube_flags[@]}" # --all remove all the pods kubectl delete --all pods "${kube_flags[@]}" # --all remove all the pods
# Post-condition: no POD is running # Post-condition: no POD is running
kube::test::get_object_assert 'pods -l name=valid-pod' "{{range.items}}{{.$id_field}}:{{end}}" '' kube::test::get_object_assert "pods -l'name in (valid-pod)'" '{{range.items}}{{.$id_field}}:{{end}}' ''
### Create two PODs ### Create two PODs
# Pre-condition: no POD is running # Pre-condition: no POD is running
@ -318,7 +318,7 @@ for version in "${kube_api_versions[@]}"; do
# Pre-condition: valid-pod POD is running # Pre-condition: valid-pod POD is running
kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:' kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" 'valid-pod:'
# Command # Command
kubectl delete pods -lname=valid-pod-super-sayan "${kube_flags[@]}" kubectl delete pods -l'name in (valid-pod-super-sayan)' "${kube_flags[@]}"
# Post-condition: no POD is running # Post-condition: no POD is running
kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" '' kube::test::get_object_assert pods "{{range.items}}{{.$id_field}}:{{end}}" ''

View File

@ -81,7 +81,7 @@ func GetResource(r RESTGetter, ctxFn ContextFunc, namer ScopeNamer, codec runtim
} }
func parseSelectorQueryParams(query url.Values, version, apiResource string) (label, field labels.Selector, err error) { func parseSelectorQueryParams(query url.Values, version, apiResource string) (label, field labels.Selector, err error) {
label, err = labels.ParseSelector(query.Get("labels")) label, err = labels.Parse(query.Get("labels"))
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }

View File

@ -30,7 +30,7 @@ import (
) )
func parseSelectorOrDie(s string) labels.Selector { func parseSelectorOrDie(s string) labels.Selector {
selector, err := labels.ParseSelector(s) selector, err := labels.Parse(s)
if err != nil { if err != nil {
panic(err) panic(err)
} }

View File

@ -139,8 +139,10 @@ func (c *testClient) ValidateCommon(t *testing.T, err error) {
validator, ok := c.QueryValidator[key] validator, ok := c.QueryValidator[key]
if !ok { if !ok {
switch key { switch key {
case "labels", "fields": case "labels":
validator = validateLabels validator = validateLabels
case "fields":
validator = validateFields
default: default:
validator = func(a, b string) bool { return a == b } validator = func(a, b string) bool { return a == b }
} }
@ -227,6 +229,18 @@ func TestListPods(t *testing.T) {
} }
func validateLabels(a, b string) bool { func validateLabels(a, b string) bool {
sA, eA := labels.Parse(a)
if eA != nil {
return false
}
sB, eB := labels.Parse(b)
if eB != nil {
return false
}
return sA.String() == sB.String()
}
func validateFields(a, b string) bool {
sA, _ := labels.ParseSelector(a) sA, _ := labels.ParseSelector(a)
sB, _ := labels.ParseSelector(b) sB, _ := labels.ParseSelector(b)
return sA.String() == sB.String() return sA.String() == sB.String()

View File

@ -236,7 +236,16 @@ func (r *Request) ParseSelectorParam(paramName, item string) *Request {
if r.err != nil { if r.err != nil {
return r return r
} }
sel, err := labels.ParseSelector(item) var sel labels.Selector
var err error
switch paramName {
case "labels":
sel, err = labels.Parse(item)
case "fields":
sel, err = labels.ParseSelector(item)
default:
err = fmt.Errorf("unknown parameter name '%s'", paramName)
}
if err != nil { if err != nil {
r.err = err r.err = err
return r return r

View File

@ -142,7 +142,7 @@ func TestRequestSetTwiceError(t *testing.T) {
} }
func TestRequestParseSelectorParam(t *testing.T) { func TestRequestParseSelectorParam(t *testing.T) {
r := (&Request{}).ParseSelectorParam("foo", "a") r := (&Request{}).ParseSelectorParam("foo", "a=")
if r.err == nil || r.params != nil { if r.err == nil || r.params != nil {
t.Errorf("should have set err and left params nil: %#v", r) t.Errorf("should have set err and left params nil: %#v", r)
} }

View File

@ -164,7 +164,7 @@ func (b *Builder) ResourceTypes(types ...string) *Builder {
// This will not affect files loaded from disk or URL. If the parameter is empty it is // This will not affect files loaded from disk or URL. If the parameter is empty it is
// a no-op - to select all resources invoke `b.Selector(labels.Everything)`. // a no-op - to select all resources invoke `b.Selector(labels.Everything)`.
func (b *Builder) SelectorParam(s string) *Builder { func (b *Builder) SelectorParam(s string) *Builder {
selector, err := labels.ParseSelector(s) selector, err := labels.Parse(s)
if err != nil { if err != nil {
b.errs = append(b.errs, fmt.Errorf("the provided selector %q is not valid: %v", s, err)) b.errs = append(b.errs, fmt.Errorf("the provided selector %q is not valid: %v", s, err))
return b return b

View File

@ -193,7 +193,7 @@ func NewRequirement(key string, op Operator, vals util.StringSet) (*Requirement,
switch op { switch op {
case InOperator, NotInOperator: case InOperator, NotInOperator:
if len(vals) == 0 { if len(vals) == 0 {
return nil, fmt.Errorf("for In,NotIn operators, values set can't be empty") return nil, fmt.Errorf("for 'in', 'notin' operators, values set can't be empty")
} }
case EqualsOperator, DoubleEqualsOperator, NotEqualsOperator: case EqualsOperator, DoubleEqualsOperator, NotEqualsOperator:
if len(vals) != 1 { if len(vals) != 1 {
@ -336,31 +336,30 @@ func (lsel LabelSelector) String() string {
type Token int type Token int
const ( const (
ERROR Token = iota ErrorToken Token = iota
EOS // end of string EndOfStringToken
CPAR ClosedParToken
COMMA CommaToken
EEQUAL DoubleEqualsToken
EQUAL EqualsToken
IDENTIFIER IdentifierToken // to represent keys and values
IN InToken
NEQUAL NotEqualsToken
NOTIN NotInToken
OPAR OpenParToken
OR
) )
// string2token contains the mapping between lexer Token and token literal // string2token contains the mapping between lexer Token and token literal
// (except IDENTIFIER, EOS and ERROR since it makes no sense) // (except IdentifierToken, EndOfStringToken and ErrorToken since it makes no sense)
var string2token = map[string]Token{ var string2token = map[string]Token{
")": CPAR, ")": ClosedParToken,
",": COMMA, ",": CommaToken,
"==": EEQUAL, "==": DoubleEqualsToken,
"=": EQUAL, "=": EqualsToken,
"in": IN, "in": InToken,
"!=": NEQUAL, "!=": NotEqualsToken,
"notin": NOTIN, "notin": NotInToken,
"(": OPAR, "(": OpenParToken,
} }
// The item produced by the lexer. It contains the Token and the literal. // The item produced by the lexer. It contains the Token and the literal.
@ -383,11 +382,12 @@ func isSpecialSymbol(ch byte) bool {
return false return false
} }
// Lexer struct // Lexer represents the Lexer struct for label selector.
// It contains necessary information to tokenize the input string
type Lexer struct { type Lexer struct {
// s stores the string to be lexed // s stores the string to be tokenized
s string s string
// pos is the position currently lexed // pos is the position currently tokenized
pos int pos int
} }
@ -402,21 +402,23 @@ func (l *Lexer) read() (b byte) {
return b return b
} }
// no return simply unread // unread 'undoes' the last read character
func (l *Lexer) unread() { func (l *Lexer) unread() {
l.pos-- l.pos--
} }
// func return a literal token (for example IN) and or an identifier. // scanIdOrKeyword scans string to recognize literal token (for example 'in') or an identifier.
func (l *Lexer) scanIdOrKeyword() (tok Token, lit string) { func (l *Lexer) scanIdOrKeyword() (tok Token, lit string) {
var buffer []byte var buffer []byte
IdentifierLoop:
for { for {
if ch := l.read(); ch == 0 { // end of string found switch ch := l.read(); {
break case ch == 0:
} else if isSpecialSymbol(ch) || isWhitespace(ch) { break IdentifierLoop
l.unread() // stop scanning and unread case isSpecialSymbol(ch) || isWhitespace(ch):
break l.unread()
} else { break IdentifierLoop
default:
buffer = append(buffer, ch) buffer = append(buffer, ch)
} }
} }
@ -424,64 +426,75 @@ func (l *Lexer) scanIdOrKeyword() (tok Token, lit string) {
if val, ok := string2token[s]; ok { // is a literal token? if val, ok := string2token[s]; ok { // is a literal token?
return val, s return val, s
} }
return IDENTIFIER, s // otherwise is an identifier return IdentifierToken, s // otherwise is an identifier
} }
// scan string starting with specail symbol. At the moment this special symbols // scanSpecialSymbol scans string starting with special symbol.
// identify not literal operators // special symbol identify non literal operators. "!=", "==", "="
func (l *Lexer) scanSpecialSymbol() (Token, string) { func (l *Lexer) scanSpecialSymbol() (Token, string) {
lastScannedItem := ScannedItem{} lastScannedItem := ScannedItem{}
var buffer []byte var buffer []byte
SpecialSymbolLoop:
for { for {
if ch := l.read(); ch == 0 { switch ch := l.read(); {
break case ch == 0:
} else if isSpecialSymbol(ch) { break SpecialSymbolLoop
case isSpecialSymbol(ch):
buffer = append(buffer, ch) buffer = append(buffer, ch)
if token, ok := string2token[string(buffer)]; ok { if token, ok := string2token[string(buffer)]; ok {
lastScannedItem = ScannedItem{tok: token, literal: string(buffer)} lastScannedItem = ScannedItem{tok: token, literal: string(buffer)}
} else if lastScannedItem.tok != 0 { } else if lastScannedItem.tok != 0 {
l.unread() l.unread()
break break SpecialSymbolLoop
} }
} else { // in any other cases (identifer or whitespace) stop default:
l.unread() l.unread()
break break SpecialSymbolLoop
} }
} }
if lastScannedItem.tok == 0 { if lastScannedItem.tok == 0 {
return ERROR, fmt.Sprintf("error expected keyword found '%s'", buffer) return ErrorToken, fmt.Sprintf("error expected: keyword found '%s'", buffer)
} else { }
return lastScannedItem.tok, lastScannedItem.literal return lastScannedItem.tok, lastScannedItem.literal
} }
}
// func Lex return Token and the literal (meaningfull only in case of IDENTIFIER) // skipWhiteSpaces consumes all blank characters
func (l *Lexer) Lex() (tok Token, lit string) { // returning the first non blank character
ch := l.read() func (l *Lexer) skipWhiteSpaces(ch byte) byte {
for { // consume spaces until no more spaces for {
if !isWhitespace(ch) { if !isWhitespace(ch) {
break return ch
} }
ch = l.read() ch = l.read()
} }
if ch == 0 { // end of the string? }
return EOS, ""
} else if isSpecialSymbol(ch) { // Lex returns a pair of Token and the literal
// literal is meaningfull only for IdentifierToken token
func (l *Lexer) Lex() (tok Token, lit string) {
switch ch := l.skipWhiteSpaces(l.read()); {
case ch == 0:
return EndOfStringToken, ""
case isSpecialSymbol(ch):
l.unread() l.unread()
return l.scanSpecialSymbol() // can be an operator return l.scanSpecialSymbol()
} else { default:
l.unread() l.unread()
return l.scanIdOrKeyword() return l.scanIdOrKeyword()
} }
} }
// Parser data structure contains the label selector parser data and algos // Parser data structure contains the label selector parser data structure
type Parser struct { type Parser struct {
l *Lexer l *Lexer
scannedItems []ScannedItem scannedItems []ScannedItem
position int position int
} }
// Parser context represents context during parsing:
// some literal for example 'in' and 'notin' can be
// recognized as operator for example 'x in (a)' but
// it can be recognized as value for example 'value in (in)'
type ParserContext int type ParserContext int
const ( const (
@ -494,40 +507,40 @@ func (p *Parser) lookahead(context ParserContext) (Token, string) {
tok, lit := p.scannedItems[p.position].tok, p.scannedItems[p.position].literal tok, lit := p.scannedItems[p.position].tok, p.scannedItems[p.position].literal
if context == Values { if context == Values {
switch tok { switch tok {
case IN, NOTIN: case InToken, NotInToken:
tok = IDENTIFIER tok = IdentifierToken
} }
} }
return tok, lit return tok, lit
} }
// return current token and string. Increments the the position // consume returns current token and string. Increments the the position
func (p *Parser) consume(context ParserContext) (Token, string) { func (p *Parser) consume(context ParserContext) (Token, string) {
p.position++ p.position++
tok, lit := p.scannedItems[p.position-1].tok, p.scannedItems[p.position-1].literal tok, lit := p.scannedItems[p.position-1].tok, p.scannedItems[p.position-1].literal
if context == Values { if context == Values {
switch tok { switch tok {
case IN, NOTIN: case InToken, NotInToken:
tok = IDENTIFIER tok = IdentifierToken
} }
} }
return tok, lit return tok, lit
} }
// scan method scan all the input string and storin <token, literal> pairs in // scan runs through the input string and stores the ScannedItem in an array
// scanned items slice. // Parser can now lookahead and consume the tokens
// The Parser can now lookahead and consume the tokens
func (p *Parser) scan() { func (p *Parser) scan() {
for { for {
token, literal := p.l.Lex() token, literal := p.l.Lex()
p.scannedItems = append(p.scannedItems, ScannedItem{token, literal}) p.scannedItems = append(p.scannedItems, ScannedItem{token, literal})
if token == EOS { if token == EndOfStringToken {
break break
} }
} }
} }
// the entry function to parse list of requirements // parse runs the left recursive descending algorithm
// on input string. It returns a list of Requirement objects.
func (p *Parser) parse() ([]Requirement, error) { func (p *Parser) parse() ([]Requirement, error) {
p.scan() // init scannedItems p.scan() // init scannedItems
@ -535,34 +548,33 @@ func (p *Parser) parse() ([]Requirement, error) {
for { for {
tok, lit := p.lookahead(Values) tok, lit := p.lookahead(Values)
switch tok { switch tok {
case IDENTIFIER: case IdentifierToken:
r, err := p.parseRequirement() r, err := p.parseRequirement()
if err != nil { if err != nil {
return nil, fmt.Errorf("Error: ", err) return nil, fmt.Errorf("unable to parse requirement: ", err)
} }
requirements = append(requirements, *r) requirements = append(requirements, *r)
t, l := p.consume(Values) t, l := p.consume(Values)
switch t { switch t {
case EOS: case EndOfStringToken:
return requirements, nil return requirements, nil
case COMMA: case CommaToken:
t2, l2 := p.lookahead(Values) t2, l2 := p.lookahead(Values)
if t2 != IDENTIFIER { if t2 != IdentifierToken {
return nil, fmt.Errorf("Expected identifier after comma, found '%s'", l2) return nil, fmt.Errorf("found '%s', expected: identifier after ','", l2)
} }
default: default:
return nil, fmt.Errorf("Bad value '%s', expetected comma or 'end of string'", l) return nil, fmt.Errorf("found '%s', expected: ',' or 'end of string'", l)
} }
case EOS: case EndOfStringToken:
return requirements, nil return requirements, nil
default: default:
return nil, fmt.Errorf("Bad value %s. Expected identifier or 'end of string'", lit) return nil, fmt.Errorf("found '%s', expected: identifier or 'end of string'", lit)
} }
} }
return requirements, nil return requirements, nil
} }
// parse a Requirement data structure
func (p *Parser) parseRequirement() (*Requirement, error) { func (p *Parser) parseRequirement() (*Requirement, error) {
key, operator, err := p.parseKeyAndInferOperator() key, operator, err := p.parseKeyAndInferOperator()
if err != nil { if err != nil {
@ -590,17 +602,19 @@ func (p *Parser) parseRequirement() (*Requirement, error) {
} }
// parseKeyAndInferOperator parse literals. // parseKeyAndInferOperator parse literals.
// in case of no operator 'in, notin, ==, =, !=' are found
// the 'exists' operator is inferred
func (p *Parser) parseKeyAndInferOperator() (string, Operator, error) { func (p *Parser) parseKeyAndInferOperator() (string, Operator, error) {
tok, literal := p.consume(Values) tok, literal := p.consume(Values)
if tok != IDENTIFIER { if tok != IdentifierToken {
err := fmt.Errorf("Found '%s' instead of expected IDENTIFIER", literal) err := fmt.Errorf("found '%s', expected: identifier", literal)
return "", "", err return "", "", err
} }
if err := validateLabelKey(literal); err != nil { if err := validateLabelKey(literal); err != nil {
return "", "", err return "", "", err
} }
var operator Operator var operator Operator
if t, _ := p.lookahead(Values); t == EOS || t == COMMA { if t, _ := p.lookahead(Values); t == EndOfStringToken || t == CommaToken {
operator = ExistsOperator operator = ExistsOperator
} }
return literal, operator, nil return literal, operator, nil
@ -611,95 +625,95 @@ func (p *Parser) parseKeyAndInferOperator() (string, Operator, error) {
func (p *Parser) parseOperator() (op Operator, err error) { func (p *Parser) parseOperator() (op Operator, err error) {
tok, lit := p.consume(KeyAndOperator) tok, lit := p.consume(KeyAndOperator)
switch tok { switch tok {
case IN: case InToken:
op = InOperator op = InOperator
case EQUAL: case EqualsToken:
op = EqualsOperator op = EqualsOperator
case EEQUAL: case DoubleEqualsToken:
op = DoubleEqualsOperator op = DoubleEqualsOperator
case NOTIN: case NotInToken:
op = NotInOperator op = NotInOperator
case NEQUAL: case NotEqualsToken:
op = NotEqualsOperator op = NotEqualsOperator
default: default:
return "", fmt.Errorf("Expected '=', '!=', '==', 'in', notin', found %s", lit) return "", fmt.Errorf("found '%s', expected: '=', '!=', '==', 'in', 'notin'", lit)
} }
return op, nil return op, nil
} }
// parse values parse the values for set based matching (x,y,z) // parseValues parses the values for set based matching (x,y,z)
func (p *Parser) parseValues() (util.StringSet, error) { func (p *Parser) parseValues() (util.StringSet, error) {
tok, lit := p.consume(Values) tok, lit := p.consume(Values)
if tok != OPAR { if tok != OpenParToken {
return nil, fmt.Errorf("Found '%s' expected '('", lit) return nil, fmt.Errorf("found '%s' expected: '('", lit)
} }
tok, lit = p.lookahead(Values) tok, lit = p.lookahead(Values)
switch tok { switch tok {
case IDENTIFIER, COMMA: case IdentifierToken, CommaToken:
s, err := p.parseIdentifiersList() // handles general cases s, err := p.parseIdentifiersList() // handles general cases
if err != nil { if err != nil {
return s, err return s, err
} }
if tok, _ = p.consume(Values); tok != CPAR { if tok, _ = p.consume(Values); tok != ClosedParToken {
return nil, fmt.Errorf("Expected a ')', found '%s'", lit) return nil, fmt.Errorf("found '%s', expected: ')'", lit)
} }
return s, nil return s, nil
case CPAR: // handles "()" case ClosedParToken: // handles "()"
p.consume(Values) p.consume(Values)
return util.NewStringSet(""), nil return util.NewStringSet(""), nil
default: default:
return nil, fmt.Errorf("Expected ')' or ',' or identifier. Found '%s'", lit) return nil, fmt.Errorf("found '%s', expected: ',', ')' or identifier", lit)
} }
return util.NewStringSet(), nil return util.NewStringSet(), nil
} }
// parseIdentifiersList parse a (possibly empty) list of // parseIdentifiersList parses a (possibly empty) list of
// of comma separated (possibly empty) identifiers // of comma separated (possibly empty) identifiers
func (p *Parser) parseIdentifiersList() (util.StringSet, error) { func (p *Parser) parseIdentifiersList() (util.StringSet, error) {
s := util.NewStringSet() s := util.NewStringSet()
for { for {
tok, lit := p.consume(Values) tok, lit := p.consume(Values)
switch tok { switch tok {
case IDENTIFIER: case IdentifierToken:
s.Insert(lit) s.Insert(lit)
tok2, lit2 := p.lookahead(Values) tok2, lit2 := p.lookahead(Values)
switch tok2 { switch tok2 {
case COMMA: case CommaToken:
continue continue
case CPAR: case ClosedParToken:
return s, nil return s, nil
default: default:
return nil, fmt.Errorf("Found '%s', expected ',' or ')'", lit2) return nil, fmt.Errorf("found '%s', expected: ',' or ')'", lit2)
} }
case COMMA: // handled here since we can have "(," case CommaToken: // handled here since we can have "(,"
if s.Len() == 0 { if s.Len() == 0 {
s.Insert("") // to handle (, s.Insert("") // to handle (,
} }
tok2, _ := p.lookahead(Values) tok2, _ := p.lookahead(Values)
if tok2 == CPAR { if tok2 == ClosedParToken {
s.Insert("") // to handle ,) Double "" removed by StringSet s.Insert("") // to handle ,) Double "" removed by StringSet
return s, nil return s, nil
} }
if tok2 == COMMA { if tok2 == CommaToken {
p.consume(Values) p.consume(Values)
s.Insert("") // to handle ,, Double "" removed by StringSet s.Insert("") // to handle ,, Double "" removed by StringSet
} }
default: // it can be operator default: // it can be operator
return s, fmt.Errorf("Found '%s', expected ',', or identifier", lit) return s, fmt.Errorf("found '%s', expected: ',', or identifier", lit)
} }
} }
} }
// parse the only value for exact match style // parseExactValue parses the only value for exact match style
func (p *Parser) parseExactValue() (util.StringSet, error) { func (p *Parser) parseExactValue() (util.StringSet, error) {
s := util.NewStringSet() s := util.NewStringSet()
if tok, lit := p.consume(Values); tok == IDENTIFIER { tok, lit := p.consume(Values)
if tok == IdentifierToken {
s.Insert(lit) s.Insert(lit)
} else {
return nil, fmt.Errorf("Found '%s', expected identifier", lit)
}
return s, nil return s, nil
} }
return nil, fmt.Errorf("found '%s', expected: identifier", lit)
}
// Parse takes a string representing a selector and returns a selector // Parse takes a string representing a selector and returns a selector
// object, or an error. This parsing function differs from ParseSelector // object, or an error. This parsing function differs from ParseSelector

View File

@ -302,17 +302,17 @@ func TestLexer(t *testing.T) {
s string s string
t Token t Token
}{ }{
{"", EOS}, {"", EndOfStringToken},
{",", COMMA}, {",", CommaToken},
{"notin", NOTIN}, {"notin", NotInToken},
{"in", IN}, {"in", InToken},
{"=", EQUAL}, {"=", EqualsToken},
{"==", EEQUAL}, {"==", DoubleEqualsToken},
{"!=", NEQUAL}, {"!=", NotEqualsToken},
{"(", OPAR}, {"(", OpenParToken},
{")", CPAR}, {")", ClosedParToken},
{"||", IDENTIFIER}, {"||", IdentifierToken},
{"!", ERROR}, {"!", ErrorToken},
} }
for _, v := range testcases { for _, v := range testcases {
l := &Lexer{s: v.s, pos: 0} l := &Lexer{s: v.s, pos: 0}
@ -320,7 +320,7 @@ func TestLexer(t *testing.T) {
if token != v.t { if token != v.t {
t.Errorf("Got %d it should be %d for '%s'", token, v.t, v.s) t.Errorf("Got %d it should be %d for '%s'", token, v.t, v.s)
} }
if v.t != ERROR && lit != v.s { if v.t != ErrorToken && lit != v.s {
t.Errorf("Got '%s' it should be '%s'", lit, v.s) t.Errorf("Got '%s' it should be '%s'", lit, v.s)
} }
} }
@ -339,13 +339,13 @@ func TestLexerSequence(t *testing.T) {
s string s string
t []Token t []Token
}{ }{
{"key in ( value )", []Token{IDENTIFIER, IN, OPAR, IDENTIFIER, CPAR}}, {"key in ( value )", []Token{IdentifierToken, InToken, OpenParToken, IdentifierToken, ClosedParToken}},
{"key notin ( value )", []Token{IDENTIFIER, NOTIN, OPAR, IDENTIFIER, CPAR}}, {"key notin ( value )", []Token{IdentifierToken, NotInToken, OpenParToken, IdentifierToken, ClosedParToken}},
{"key in ( value1, value2 )", []Token{IDENTIFIER, IN, OPAR, IDENTIFIER, COMMA, IDENTIFIER, CPAR}}, {"key in ( value1, value2 )", []Token{IdentifierToken, InToken, OpenParToken, IdentifierToken, CommaToken, IdentifierToken, ClosedParToken}},
{"key", []Token{IDENTIFIER}}, {"key", []Token{IdentifierToken}},
{"()", []Token{OPAR, CPAR}}, {"()", []Token{OpenParToken, ClosedParToken}},
{"x in (),y", []Token{IDENTIFIER, IN, OPAR, CPAR, COMMA, IDENTIFIER}}, {"x in (),y", []Token{IdentifierToken, InToken, OpenParToken, ClosedParToken, CommaToken, IdentifierToken}},
{"== != (), = notin", []Token{EEQUAL, NEQUAL, OPAR, CPAR, COMMA, EQUAL, NOTIN}}, {"== != (), = notin", []Token{DoubleEqualsToken, NotEqualsToken, OpenParToken, ClosedParToken, CommaToken, EqualsToken, NotInToken}},
} }
for _, v := range testcases { for _, v := range testcases {
var literals []string var literals []string
@ -353,7 +353,7 @@ func TestLexerSequence(t *testing.T) {
l := &Lexer{s: v.s, pos: 0} l := &Lexer{s: v.s, pos: 0}
for { for {
token, lit := l.Lex() token, lit := l.Lex()
if token == EOS { if token == EndOfStringToken {
break break
} }
tokens = append(tokens, token) tokens = append(tokens, token)
@ -374,14 +374,14 @@ func TestParserLookahead(t *testing.T) {
s string s string
t []Token t []Token
}{ }{
{"key in ( value )", []Token{IDENTIFIER, IN, OPAR, IDENTIFIER, CPAR, EOS}}, {"key in ( value )", []Token{IdentifierToken, InToken, OpenParToken, IdentifierToken, ClosedParToken, EndOfStringToken}},
{"key notin ( value )", []Token{IDENTIFIER, NOTIN, OPAR, IDENTIFIER, CPAR, EOS}}, {"key notin ( value )", []Token{IdentifierToken, NotInToken, OpenParToken, IdentifierToken, ClosedParToken, EndOfStringToken}},
{"key in ( value1, value2 )", []Token{IDENTIFIER, IN, OPAR, IDENTIFIER, COMMA, IDENTIFIER, CPAR, EOS}}, {"key in ( value1, value2 )", []Token{IdentifierToken, InToken, OpenParToken, IdentifierToken, CommaToken, IdentifierToken, ClosedParToken, EndOfStringToken}},
{"key", []Token{IDENTIFIER, EOS}}, {"key", []Token{IdentifierToken, EndOfStringToken}},
{"()", []Token{OPAR, CPAR, EOS}}, {"()", []Token{OpenParToken, ClosedParToken, EndOfStringToken}},
{"", []Token{EOS}}, {"", []Token{EndOfStringToken}},
{"x in (),y", []Token{IDENTIFIER, IN, OPAR, CPAR, COMMA, IDENTIFIER, EOS}}, {"x in (),y", []Token{IdentifierToken, InToken, OpenParToken, ClosedParToken, CommaToken, IdentifierToken, EndOfStringToken}},
{"== != (), = notin", []Token{EEQUAL, NEQUAL, OPAR, CPAR, COMMA, EQUAL, NOTIN, EOS}}, {"== != (), = notin", []Token{DoubleEqualsToken, NotEqualsToken, OpenParToken, ClosedParToken, CommaToken, EqualsToken, NotInToken, EndOfStringToken}},
} }
for _, v := range testcases { for _, v := range testcases {
p := &Parser{l: &Lexer{s: v.s, pos: 0}, position: 0} p := &Parser{l: &Lexer{s: v.s, pos: 0}, position: 0}
@ -393,7 +393,7 @@ func TestParserLookahead(t *testing.T) {
token, lit := p.lookahead(KeyAndOperator) token, lit := p.lookahead(KeyAndOperator)
token2, lit2 := p.consume(KeyAndOperator) token2, lit2 := p.consume(KeyAndOperator)
if token == EOS { if token == EndOfStringToken {
break break
} }
if token != token2 || lit != lit2 { if token != token2 || lit != lit2 {
@ -597,6 +597,7 @@ func getRequirement(key string, op Operator, vals util.StringSet, t *testing.T)
req, err := NewRequirement(key, op, vals) req, err := NewRequirement(key, op, vals)
if err != nil { if err != nil {
t.Errorf("NewRequirement(%v, %v, %v) resulted in error:%v", key, op, vals, err) t.Errorf("NewRequirement(%v, %v, %v) resulted in error:%v", key, op, vals, err)
return Requirement{}
} }
return *req return *req
} }

View File

@ -73,7 +73,7 @@ func TestSelectionPredicate(t *testing.T) {
} }
for name, item := range table { for name, item := range table {
parsedLabel, err := labels.ParseSelector(item.labelSelector) parsedLabel, err := labels.Parse(item.labelSelector)
if err != nil { if err != nil {
panic(err) panic(err)
} }

View File

@ -351,7 +351,7 @@ func TestListPodListSelection(t *testing.T) {
} }
for index, item := range table { for index, item := range table {
label, err := labels.ParseSelector(item.label) label, err := labels.Parse(item.label)
if err != nil { if err != nil {
t.Errorf("unexpected error: %v", err) t.Errorf("unexpected error: %v", err)
continue continue