mirror of https://github.com/rancher/steve.git synced 2025-08-05 16:33:32 +00:00
Commit 0bd994491d by Eric Promislow, 2025-04-25 00:55:47 +00:00 (committed by GitHub, GPG Key ID: B5690EEEBB952194)
21 changed files with 4441 additions and 2544 deletions

go.mod
View File

@@ -14,7 +14,6 @@ require (
github.com/adrg/xdg v0.5.3
github.com/golang/protobuf v1.5.4
github.com/google/gnostic-models v0.6.9
-github.com/google/go-cmp v0.6.0
github.com/gorilla/mux v1.8.1
github.com/gorilla/websocket v1.5.3
github.com/pborman/uuid v1.2.1
@@ -80,6 +79,7 @@ require (
github.com/gogo/protobuf v1.3.2 // indirect
github.com/google/btree v1.0.1 // indirect
github.com/google/cel-go v0.22.0 // indirect
+github.com/google/go-cmp v0.6.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect

View File

@@ -25,10 +25,13 @@ like any other informer, but with a wider array of options. The options are conf
### List Options
ListOptions includes the following:
* Match filters for indexed fields. Filters are for specifying the value a given field in an object should be in order to
-be included in the list. Filters can be set to equals or not equals. Filters can be set to look for partial matches or
-exact (strict) matches. Filters can be OR'd and AND'd with one another. Filters only work on fields that have been indexed.
-* Primary field and secondary field sorting order. Can choose up to two fields to sort on. Sort order can be ascending
-or descending. Default sorting is to sort on metadata.namespace in ascending first and then sort on metadata.name.
+be included in the list. Filters are similar to the operators on labels in the `kubectl` CLI. Filters can be set to look for partial matches or
+exact (strict) matches. Filters can be OR'd and AND'd with one another. A query of the form `filter=field1 OP1 val1,field2 OP2 val2` is an `OR` test,
+while separate filters are AND'd together, as in `filter=field1 OP1 val1&filter=field2 OP2 val2`.
+* Filters only work on fields that have been indexed. All `metadata.labels` are also indexed.
+* Any number of sort fields can be specified, but must be comma-separated in a single `sort=....` query.
+Precede each field with a dash (`-`) to sort descending. The default sort is `sort=metadata.namespace,metadata.name`
+(sort first by namespace, then name).
* Page size to specify how many items to include in a response.
* Page number to specify offset. For example, a page size of 50 and a page number of 2, will return items starting at
index 50. Index will be dependent on sort. Page numbers start at 1.
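As an illustration of how these options combine in a single request (the resource path, field names, and values here are placeholders, and `pagesize`/`page` are the pagination parameter names steve conventionally uses):

```
/v1/configmaps?filter=metadata.name=app-config&sort=-metadata.namespace,metadata.name&pagesize=50&page=2
```

This filters on `metadata.name`, sorts by namespace descending and then by name ascending, and returns the second page of 50 items.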
@@ -95,10 +98,12 @@ intended to be used as a way of enforcing RBAC.
## Technical Information
### SQL Tables
-There are three tables that are created for the ListOption informer:
+There are four tables that are created for the ListOption informer:
* object table - this contains objects, including all their fields, as blobs. These blobs may be encrypted.
* fields table - this contains specific fields of value for objects. These are specified on informer create and are fields
that it is desired to filter or order on.
+* labels table - this contains the labels for each object in the object table.
+They go in a separate table because an object can have any number of labels.
* indices table - the indices table stores indexes created and objects' values for each index. This backs the generic indexer
that contains the functionality needed to conform to cache.Indexer.
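The labels table is keyed by the same object key as the object and fields tables, one row per label. A rough sketch of the row shape (illustrative only; the real schema is created by the sqlcache store, and the names below simply mirror the `key`/`label`/`value` columns referenced in the generated SQL):

```go
package sketch

// labelRow approximates one row of a "<name>_labels" table: an object with N labels
// contributes N rows, all sharing the object's key, which is why label lookups join
// back to the object table on that key.
type labelRow struct {
	Key   string // same key as the object and fields tables ("o.key" / "lt.key" in the queries)
	Label string // label name, e.g. "field.cattle.io/projectId"
	Value string // label value
}
```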
@@ -136,16 +141,12 @@ have the following indexes by default:
### ListOptions Behavior
Defaults:
-* Sort.PrimaryField: `metadata.namespace`
-* Sort.SecondaryField: `metadata.name`
-* Sort.PrimaryOrder: `ASC` (ascending)
-* Sort.SecondaryOrder: `ASC` (ascending)
+* `sort=metadata.namespace,metadata.name` (ascending order for both)
* All filters have partial matching set to false by default
There are some uncommon ways someone could use ListOptions where it would be difficult to predict what the result would be.
Below is a non-exhaustive list of some of these cases and what the behavior is:
* Setting Pagination.Page but not Pagination.PageSize will cause Page to be ignored
-* Setting Sort.SecondaryField only will sort as though it was Sort.PrimaryField. Sort.SecondaryOrder will still be applied
and Sort.PrimaryOrder will be ignored
### Writing Secure Queries

View File

@@ -10,6 +10,7 @@ import (
"github.com/rancher/steve/pkg/sqlcache/db"
"github.com/rancher/steve/pkg/sqlcache/partition"
+"github.com/rancher/steve/pkg/sqlcache/sqltypes"
sqlStore "github.com/rancher/steve/pkg/sqlcache/store"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
@@ -29,7 +30,7 @@ type Informer struct {
}
type ByOptionsLister interface {
-ListByOptions(ctx context.Context, lo ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
+ListByOptions(ctx context.Context, lo *sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
}
// this is set to a var so that it can be overridden by test code for mocking purposes
@@ -102,7 +103,7 @@ func NewInformer(ctx context.Context, client dynamic.ResourceInterface, fields [
// - the total number of resources (returned list might be a subset depending on pagination options in lo)
// - a continue token, if there are more pages after the returned one
// - an error instead of all of the above if anything went wrong
-func (i *Informer) ListByOptions(ctx context.Context, lo ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error) {
+func (i *Informer) ListByOptions(ctx context.Context, lo *sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error) {
return i.ByOptionsLister.ListByOptions(ctx, lo, partitions, namespace)
}
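A minimal usage sketch of the updated signature (assumptions: `inf` is an already-synced `*informer.Informer`, the zero-value `ListOptions` means no filtering and default sort, a single passthrough partition applies no extra RBAC filtering, and `listOnePage` is a hypothetical helper, not part of this commit):

```go
package sketch

import (
	"context"

	"github.com/rancher/steve/pkg/sqlcache/informer"
	"github.com/rancher/steve/pkg/sqlcache/partition"
	"github.com/rancher/steve/pkg/sqlcache/sqltypes"
	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)

// listOnePage shows the new calling convention: ListOptions now lives in the
// sqltypes package and is passed by pointer.
func listOnePage(ctx context.Context, inf *informer.Informer, namespace string) (*unstructured.UnstructuredList, error) {
	lo := &sqltypes.ListOptions{}                        // zero value: no filters, default sort, no pagination
	parts := []partition.Partition{{Passthrough: true}}  // passthrough: no extra partition filtering
	list, total, continueToken, err := inf.ListByOptions(ctx, lo, parts, namespace)
	if err != nil {
		return nil, err
	}
	_ = total         // total matches across all pages, not just this one
	_ = continueToken // non-empty when more pages follow
	return list, nil
}
```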

View File

@@ -14,6 +14,7 @@ import (
reflect "reflect"
partition "github.com/rancher/steve/pkg/sqlcache/partition"
+sqltypes "github.com/rancher/steve/pkg/sqlcache/sqltypes"
gomock "go.uber.org/mock/gomock"
unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)
@@ -42,7 +43,7 @@ func (m *MockByOptionsLister) EXPECT() *MockByOptionsListerMockRecorder {
}
// ListByOptions mocks base method.
-func (m *MockByOptionsLister) ListByOptions(arg0 context.Context, arg1 ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
+func (m *MockByOptionsLister) ListByOptions(arg0 context.Context, arg1 *sqltypes.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(*unstructured.UnstructuredList)

View File

@@ -9,6 +9,7 @@ import (
"github.com/rancher/steve/pkg/sqlcache/db"
"github.com/rancher/steve/pkg/sqlcache/partition"
+"github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/stretchr/testify/assert"
"go.uber.org/mock/gomock"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -313,7 +314,7 @@ func TestInformerListByOptions(t *testing.T) {
informer := &Informer{
ByOptionsLister: indexer,
}
-lo := ListOptions{}
+lo := sqltypes.ListOptions{}
var partitions []partition.Partition
ns := "somens"
expectedList := &unstructured.UnstructuredList{
@@ -324,8 +325,8 @@
}
expectedTotal := len(expectedList.Items)
expectedContinueToken := "123"
-indexer.EXPECT().ListByOptions(context.Background(), lo, partitions, ns).Return(expectedList, expectedTotal, expectedContinueToken, nil)
-list, total, continueToken, err := informer.ListByOptions(context.Background(), lo, partitions, ns)
+indexer.EXPECT().ListByOptions(context.Background(), &lo, partitions, ns).Return(expectedList, expectedTotal, expectedContinueToken, nil)
+list, total, continueToken, err := informer.ListByOptions(context.Background(), &lo, partitions, ns)
assert.Nil(t, err)
assert.Equal(t, expectedList, list)
assert.Equal(t, len(expectedList.Items), total)
@@ -336,11 +337,11 @@
informer := &Informer{
ByOptionsLister: indexer,
}
-lo := ListOptions{}
+lo := sqltypes.ListOptions{}
var partitions []partition.Partition
ns := "somens"
-indexer.EXPECT().ListByOptions(context.Background(), lo, partitions, ns).Return(nil, 0, "", fmt.Errorf("error"))
-_, _, _, err := informer.ListByOptions(context.Background(), lo, partitions, ns)
+indexer.EXPECT().ListByOptions(context.Background(), &lo, partitions, ns).Return(nil, 0, "", fmt.Errorf("error"))
+_, _, _, err := informer.ListByOptions(context.Background(), &lo, partitions, ns)
assert.NotNil(t, err)
}})
t.Parallel()

View File

@@ -7,11 +7,11 @@ import (
"errors"
"fmt"
"regexp"
-"sort"
"strconv"
"strings"
"github.com/rancher/steve/pkg/sqlcache/db/transaction"
+"github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/sirupsen/logrus"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/client-go/tools/cache"
@@ -93,6 +93,7 @@ func NewListOptionIndexer(ctx context.Context, fields [][]string, s Store, names
for _, f := range fields {
indexedFields = append(indexedFields, toColumnName(f))
}
+fmt.Println(indexedFields)
l := &ListOptionIndexer{
Indexer: i,
@@ -247,11 +248,13 @@ func (l *ListOptionIndexer) deleteLabels(key string, tx transaction.Client) erro
// - the total number of resources (returned list might be a subset depending on pagination options in lo)
// - a continue token, if there are more pages after the returned one
// - an error instead of all of the above if anything went wrong
-func (l *ListOptionIndexer) ListByOptions(ctx context.Context, lo ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error) {
+func (l *ListOptionIndexer) ListByOptions(ctx context.Context, lo *sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error) {
queryInfo, err := l.constructQuery(lo, partitions, namespace, db.Sanitize(l.GetName()))
if err != nil {
return nil, 0, "", err
}
+logrus.Debugf("ListOptionIndexer prepared statement: %v", queryInfo.query)
+logrus.Debugf("Params: %v", queryInfo.params)
return l.executeQuery(ctx, queryInfo)
}
@@ -266,212 +269,6 @@ type QueryInfo struct
offset int
}
func (l *ListOptionIndexer) constructQuery(lo ListOptions, partitions []partition.Partition, namespace string, dbName string) (*QueryInfo, error) {
ensureSortLabelsAreSelected(&lo)
queryInfo := &QueryInfo{}
queryUsesLabels := hasLabelFilter(lo.Filters)
joinTableIndexByLabelName := make(map[string]int)
// First, what kind of filtering will we be doing?
// 1- Intro: SELECT and JOIN clauses
// There's a 1:1 correspondence between a base table and its _Fields table
// but it's possible that a key has no associated labels, so if we're doing a
// non-existence test on labels we need to do a LEFT OUTER JOIN
distinctModifier := ""
if queryUsesLabels {
distinctModifier = " DISTINCT"
}
query := fmt.Sprintf(`SELECT%s o.object, o.objectnonce, o.dekid FROM "%s" o`, distinctModifier, dbName)
query += "\n "
query += fmt.Sprintf(`JOIN "%s_fields" f ON o.key = f.key`, dbName)
if queryUsesLabels {
for i, orFilter := range lo.Filters {
for j, filter := range orFilter.Filters {
if isLabelFilter(&filter) {
labelName := filter.Field[2]
_, ok := joinTableIndexByLabelName[labelName]
if !ok {
// Make the lt index 1-based for readability
jtIndex := i + j + 1
joinTableIndexByLabelName[labelName] = jtIndex
query += "\n "
query += fmt.Sprintf(`LEFT OUTER JOIN "%s_labels" lt%d ON o.key = lt%d.key`, dbName, jtIndex, jtIndex)
}
}
}
}
}
params := []any{}
// 2- Filtering: WHERE clauses (from lo.Filters)
whereClauses := []string{}
for _, orFilters := range lo.Filters {
orClause, orParams, err := l.buildORClauseFromFilters(orFilters, dbName, joinTableIndexByLabelName)
if err != nil {
return queryInfo, err
}
if orClause == "" {
continue
}
whereClauses = append(whereClauses, orClause)
params = append(params, orParams...)
}
// WHERE clauses (from namespace)
if namespace != "" && namespace != "*" {
whereClauses = append(whereClauses, fmt.Sprintf(`f."metadata.namespace" = ?`))
params = append(params, namespace)
}
// WHERE clauses (from partitions and their corresponding parameters)
partitionClauses := []string{}
for _, thisPartition := range partitions {
if thisPartition.Passthrough {
// nothing to do, no extra filtering to apply by definition
} else {
singlePartitionClauses := []string{}
// filter by namespace
if thisPartition.Namespace != "" && thisPartition.Namespace != "*" {
singlePartitionClauses = append(singlePartitionClauses, fmt.Sprintf(`f."metadata.namespace" = ?`))
params = append(params, thisPartition.Namespace)
}
// optionally filter by names
if !thisPartition.All {
names := thisPartition.Names
if len(names) == 0 {
// degenerate case, there will be no results
singlePartitionClauses = append(singlePartitionClauses, "FALSE")
} else {
singlePartitionClauses = append(singlePartitionClauses, fmt.Sprintf(`f."metadata.name" IN (?%s)`, strings.Repeat(", ?", len(thisPartition.Names)-1)))
// sort for reproducibility
sortedNames := thisPartition.Names.UnsortedList()
sort.Strings(sortedNames)
for _, name := range sortedNames {
params = append(params, name)
}
}
}
if len(singlePartitionClauses) > 0 {
partitionClauses = append(partitionClauses, strings.Join(singlePartitionClauses, " AND "))
}
}
}
if len(partitions) == 0 {
// degenerate case, there will be no results
whereClauses = append(whereClauses, "FALSE")
}
if len(partitionClauses) == 1 {
whereClauses = append(whereClauses, partitionClauses[0])
}
if len(partitionClauses) > 1 {
whereClauses = append(whereClauses, "(\n ("+strings.Join(partitionClauses, ") OR\n (")+")\n)")
}
if len(whereClauses) > 0 {
query += "\n WHERE\n "
for index, clause := range whereClauses {
query += fmt.Sprintf("(%s)", clause)
if index == len(whereClauses)-1 {
break
}
query += " AND\n "
}
}
// before proceeding, save a copy of the query and params without LIMIT/OFFSET/ORDER info
// for COUNTing all results later
countQuery := fmt.Sprintf("SELECT COUNT(*) FROM (%s)", query)
countParams := params[:]
// 3- Sorting: ORDER BY clauses (from lo.Sort)
if len(lo.Sort.Fields) != len(lo.Sort.Orders) {
return nil, fmt.Errorf("sort fields length %d != sort orders length %d", len(lo.Sort.Fields), len(lo.Sort.Orders))
}
if len(lo.Sort.Fields) > 0 {
orderByClauses := []string{}
for i, field := range lo.Sort.Fields {
if isLabelsFieldList(field) {
clause, sortParam, err := buildSortLabelsClause(field[2], joinTableIndexByLabelName, lo.Sort.Orders[i] == ASC)
if err != nil {
return nil, err
}
orderByClauses = append(orderByClauses, clause)
params = append(params, sortParam)
} else {
columnName := toColumnName(field)
if err := l.validateColumn(columnName); err != nil {
return queryInfo, err
}
direction := "ASC"
if lo.Sort.Orders[i] == DESC {
direction = "DESC"
}
orderByClauses = append(orderByClauses, fmt.Sprintf(`f."%s" %s`, columnName, direction))
}
}
query += "\n ORDER BY "
query += strings.Join(orderByClauses, ", ")
} else {
// make sure one default order is always picked
if l.namespaced {
query += "\n ORDER BY f.\"metadata.namespace\" ASC, f.\"metadata.name\" ASC "
} else {
query += "\n ORDER BY f.\"metadata.name\" ASC "
}
}
// 4- Pagination: LIMIT clause (from lo.Pagination and/or lo.ChunkSize/lo.Resume)
limitClause := ""
// take the smallest limit between lo.Pagination and lo.ChunkSize
limit := lo.Pagination.PageSize
if limit == 0 || (lo.ChunkSize > 0 && lo.ChunkSize < limit) {
limit = lo.ChunkSize
}
if limit > 0 {
limitClause = "\n LIMIT ?"
params = append(params, limit)
}
// OFFSET clause (from lo.Pagination and/or lo.Resume)
offsetClause := ""
offset := 0
if lo.Resume != "" {
offsetInt, err := strconv.Atoi(lo.Resume)
if err != nil {
return queryInfo, err
}
offset = offsetInt
}
if lo.Pagination.Page >= 1 {
offset += lo.Pagination.PageSize * (lo.Pagination.Page - 1)
}
if offset > 0 {
offsetClause = "\n OFFSET ?"
params = append(params, offset)
}
if limit > 0 || offset > 0 {
query += limitClause
query += offsetClause
queryInfo.countQuery = countQuery
queryInfo.countParams = countParams
queryInfo.limit = limit
queryInfo.offset = offset
}
// Otherwise leave these as default values and the executor won't do pagination work
logrus.Debugf("ListOptionIndexer prepared statement: %v", query)
logrus.Debugf("Params: %v", params)
queryInfo.query = query
queryInfo.params = params
return queryInfo, nil
}
func (l *ListOptionIndexer) executeQuery(ctx context.Context, queryInfo *QueryInfo) (result *unstructured.UnstructuredList, total int, token string, err error) {
stmt := l.Prepare(queryInfo.query)
defer func() {
@@ -529,337 +326,12 @@ func (l *ListOptionIndexer) executeQuery(ctx context.Context, queryInfo *QueryIn
return toUnstructuredList(items), total, continueToken, nil
}
-func (l *ListOptionIndexer) validateColumn(column string) error {
-for _, v := range l.indexedFields {
-if v == column {
-return nil
-}
-}
-return fmt.Errorf("column is invalid [%s]: %w", column, ErrInvalidColumn)
-}
+func extractSubFields(fields string) []string {
+subfields := make([]string, 0)
+for _, subField := range subfieldRegex.FindAllString(fields, -1) {
+subfields = append(subfields, strings.TrimSuffix(subField, "."))
+}
+return subfields
+}
// buildORClause creates an SQLite compatible query that ORs conditions built from passed filters
func (l *ListOptionIndexer) buildORClauseFromFilters(orFilters OrFilter, dbName string, joinTableIndexByLabelName map[string]int) (string, []any, error) {
var params []any
clauses := make([]string, 0, len(orFilters.Filters))
var newParams []any
var newClause string
var err error
for _, filter := range orFilters.Filters {
if isLabelFilter(&filter) {
index, ok := joinTableIndexByLabelName[filter.Field[2]]
if !ok {
return "", nil, fmt.Errorf("internal error: no index for label name %s", filter.Field[2])
}
newClause, newParams, err = l.getLabelFilter(index, filter, dbName)
} else {
newClause, newParams, err = l.getFieldFilter(filter)
}
if err != nil {
return "", nil, err
}
clauses = append(clauses, newClause)
params = append(params, newParams...)
}
switch len(clauses) {
case 0:
return "", params, nil
case 1:
return clauses[0], params, nil
}
return fmt.Sprintf("(%s)", strings.Join(clauses, ") OR (")), params, nil
}
func buildSortLabelsClause(labelName string, joinTableIndexByLabelName map[string]int, isAsc bool) (string, string, error) {
ltIndex, ok := joinTableIndexByLabelName[labelName]
if !ok {
return "", "", fmt.Errorf(`internal error: no join-table index given for labelName "%s"`, labelName)
}
stmt := fmt.Sprintf(`CASE lt%d.label WHEN ? THEN lt%d.value ELSE NULL END`, ltIndex, ltIndex)
dir := "ASC"
nullsPosition := "LAST"
if !isAsc {
dir = "DESC"
nullsPosition = "FIRST"
}
return fmt.Sprintf("(%s) %s NULLS %s", stmt, dir, nullsPosition), labelName, nil
}
// If the user tries to sort on a particular label without mentioning it in a query,
// it turns out that the sort-directive is ignored. It could be that the sqlite engine
// is doing some kind of optimization on the `select distinct`, but verifying an otherwise
// unreferenced label exists solves this problem.
// And it's better to do this by modifying the ListOptions object.
// There are no thread-safety issues in doing this because the ListOptions object is
// created in Store.ListByPartitions, and that ends up calling ListOptionIndexer.ConstructQuery.
// No other goroutines access this object.
func ensureSortLabelsAreSelected(lo *ListOptions) {
if len(lo.Sort.Fields) == 0 {
return
}
unboundSortLabels := make(map[string]bool)
for _, fieldList := range lo.Sort.Fields {
if isLabelsFieldList(fieldList) {
unboundSortLabels[fieldList[2]] = true
}
}
if len(unboundSortLabels) == 0 {
return
}
// If we have sort directives but no filters, add an exists-filter for each label.
if lo.Filters == nil || len(lo.Filters) == 0 {
lo.Filters = make([]OrFilter, 1)
lo.Filters[0].Filters = make([]Filter, len(unboundSortLabels))
i := 0
for labelName := range unboundSortLabels {
lo.Filters[0].Filters[i] = Filter{
Field: []string{"metadata", "labels", labelName},
Op: Exists,
}
i++
}
return
}
// The gotcha is we have to bind the labels for each set of orFilters, so copy them each time
for i, orFilters := range lo.Filters {
copyUnboundSortLabels := make(map[string]bool, len(unboundSortLabels))
for k, v := range unboundSortLabels {
copyUnboundSortLabels[k] = v
}
for _, filter := range orFilters.Filters {
if isLabelFilter(&filter) {
copyUnboundSortLabels[filter.Field[2]] = false
}
}
// Now for any labels that are still true, add another where clause
for labelName, needsBinding := range copyUnboundSortLabels {
if needsBinding {
// `orFilters` is a copy of lo.Filters[i], so reference the original.
lo.Filters[i].Filters = append(lo.Filters[i].Filters, Filter{
Field: []string{"metadata", "labels", labelName},
Op: Exists,
})
}
}
}
}
// Possible ops from the k8s parser:
// KEY = and == (same) VALUE
// KEY != VALUE
// KEY exists [] # ,KEY, => this filter
// KEY ! [] # ,!KEY, => assert KEY doesn't exist
// KEY in VALUES
// KEY notin VALUES
func (l *ListOptionIndexer) getFieldFilter(filter Filter) (string, []any, error) {
opString := ""
escapeString := ""
columnName := toColumnName(filter.Field)
if err := l.validateColumn(columnName); err != nil {
return "", nil, err
}
switch filter.Op {
case Eq:
if filter.Partial {
opString = "LIKE"
escapeString = escapeBackslashDirective
} else {
opString = "="
}
clause := fmt.Sprintf(`f."%s" %s ?%s`, columnName, opString, escapeString)
return clause, []any{formatMatchTarget(filter)}, nil
case NotEq:
if filter.Partial {
opString = "NOT LIKE"
escapeString = escapeBackslashDirective
} else {
opString = "!="
}
clause := fmt.Sprintf(`f."%s" %s ?%s`, columnName, opString, escapeString)
return clause, []any{formatMatchTarget(filter)}, nil
case Lt, Gt:
sym, target, err := prepareComparisonParameters(filter.Op, filter.Matches[0])
if err != nil {
return "", nil, err
}
clause := fmt.Sprintf(`f."%s" %s ?`, columnName, sym)
return clause, []any{target}, nil
case Exists, NotExists:
return "", nil, errors.New("NULL and NOT NULL tests aren't supported for non-label queries")
case In:
fallthrough
case NotIn:
target := "()"
if len(filter.Matches) > 0 {
target = fmt.Sprintf("(?%s)", strings.Repeat(", ?", len(filter.Matches)-1))
}
opString = "IN"
if filter.Op == NotIn {
opString = "NOT IN"
}
clause := fmt.Sprintf(`f."%s" %s %s`, columnName, opString, target)
matches := make([]any, len(filter.Matches))
for i, match := range filter.Matches {
matches[i] = match
}
return clause, matches, nil
}
return "", nil, fmt.Errorf("unrecognized operator: %s", opString)
}
func (l *ListOptionIndexer) getLabelFilter(index int, filter Filter, dbName string) (string, []any, error) {
opString := ""
escapeString := ""
matchFmtToUse := strictMatchFmt
labelName := filter.Field[2]
switch filter.Op {
case Eq:
if filter.Partial {
opString = "LIKE"
escapeString = escapeBackslashDirective
matchFmtToUse = matchFmt
} else {
opString = "="
}
clause := fmt.Sprintf(`lt%d.label = ? AND lt%d.value %s ?%s`, index, index, opString, escapeString)
return clause, []any{labelName, formatMatchTargetWithFormatter(filter.Matches[0], matchFmtToUse)}, nil
case NotEq:
if filter.Partial {
opString = "NOT LIKE"
escapeString = escapeBackslashDirective
matchFmtToUse = matchFmt
} else {
opString = "!="
}
subFilter := Filter{
Field: filter.Field,
Op: NotExists,
}
existenceClause, subParams, err := l.getLabelFilter(index, subFilter, dbName)
if err != nil {
return "", nil, err
}
clause := fmt.Sprintf(`(%s) OR (lt%d.label = ? AND lt%d.value %s ?%s)`, existenceClause, index, index, opString, escapeString)
params := append(subParams, labelName, formatMatchTargetWithFormatter(filter.Matches[0], matchFmtToUse))
return clause, params, nil
case Lt, Gt:
sym, target, err := prepareComparisonParameters(filter.Op, filter.Matches[0])
if err != nil {
return "", nil, err
}
clause := fmt.Sprintf(`lt%d.label = ? AND lt%d.value %s ?`, index, index, sym)
return clause, []any{labelName, target}, nil
case Exists:
clause := fmt.Sprintf(`lt%d.label = ?`, index)
return clause, []any{labelName}, nil
case NotExists:
clause := fmt.Sprintf(`o.key NOT IN (SELECT o1.key FROM "%s" o1
JOIN "%s_fields" f1 ON o1.key = f1.key
LEFT OUTER JOIN "%s_labels" lt%di1 ON o1.key = lt%di1.key
WHERE lt%di1.label = ?)`, dbName, dbName, dbName, index, index, index)
return clause, []any{labelName}, nil
case In:
target := "(?"
if len(filter.Matches) > 0 {
target += strings.Repeat(", ?", len(filter.Matches)-1)
}
target += ")"
clause := fmt.Sprintf(`lt%d.label = ? AND lt%d.value IN %s`, index, index, target)
matches := make([]any, len(filter.Matches)+1)
matches[0] = labelName
for i, match := range filter.Matches {
matches[i+1] = match
}
return clause, matches, nil
case NotIn:
target := "(?"
if len(filter.Matches) > 0 {
target += strings.Repeat(", ?", len(filter.Matches)-1)
}
target += ")"
subFilter := Filter{
Field: filter.Field,
Op: NotExists,
}
existenceClause, subParams, err := l.getLabelFilter(index, subFilter, dbName)
if err != nil {
return "", nil, err
}
clause := fmt.Sprintf(`(%s) OR (lt%d.label = ? AND lt%d.value NOT IN %s)`, existenceClause, index, index, target)
matches := append(subParams, labelName)
for _, match := range filter.Matches {
matches = append(matches, match)
}
return clause, matches, nil
}
return "", nil, fmt.Errorf("unrecognized operator: %s", opString)
}
func prepareComparisonParameters(op Op, target string) (string, float64, error) {
num, err := strconv.ParseFloat(target, 32)
if err != nil {
return "", 0, err
}
switch op {
case Lt:
return "<", num, nil
case Gt:
return ">", num, nil
}
return "", 0, fmt.Errorf("unrecognized operator when expecting '<' or '>': '%s'", op)
}
func formatMatchTarget(filter Filter) string {
format := strictMatchFmt
if filter.Partial {
format = matchFmt
}
return formatMatchTargetWithFormatter(filter.Matches[0], format)
}
func formatMatchTargetWithFormatter(match string, format string) string {
// To allow matches on the backslash itself, the character needs to be replaced first.
// Otherwise, it will undo the following replacements.
match = strings.ReplaceAll(match, `\`, `\\`)
match = strings.ReplaceAll(match, `_`, `\_`)
match = strings.ReplaceAll(match, `%`, `\%`)
return fmt.Sprintf(format, match)
}
// There are two kinds of string arrays to turn into a string, based on the last value in the array
// simple: ["a", "b", "conformsToIdentifier"] => "a.b.conformsToIdentifier"
// complex: ["a", "b", "foo.io/stuff"] => "a.b[foo.io/stuff]"
func smartJoin(s []string) string {
if len(s) == 0 {
return ""
}
if len(s) == 1 {
return s[0]
}
lastBit := s[len(s)-1]
simpleName := regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_]*$`)
if simpleName.MatchString(lastBit) {
return strings.Join(s, ".")
}
return fmt.Sprintf("%s[%s]", strings.Join(s[0:len(s)-1], "."), lastBit)
}
// toColumnName returns the column name corresponding to a field expressed as string slice
func toColumnName(s []string) string {
return db.Sanitize(smartJoin(s))
} }
// getField extracts the value of a field expressed as a string path from an unstructured object
@@ -920,31 +392,9 @@ func getField(a any, field string) (any, error) {
return obj, nil
}
-func extractSubFields(fields string) []string {
-subfields := make([]string, 0)
-for _, subField := range subfieldRegex.FindAllString(fields, -1) {
-subfields = append(subfields, strings.TrimSuffix(subField, "."))
-}
-return subfields
-}
-func isLabelFilter(f *Filter) bool {
-return len(f.Field) >= 2 && f.Field[0] == "metadata" && f.Field[1] == "labels"
-}
-func hasLabelFilter(filters []OrFilter) bool {
-for _, outerFilter := range filters {
-for _, filter := range outerFilter.Filters {
-if isLabelFilter(&filter) {
-return true
-}
-}
-}
-return false
-}
-func isLabelsFieldList(fields []string) bool {
-return len(fields) == 3 && fields[0] == "metadata" && fields[1] == "labels"
-}
+// toColumnName returns the column name corresponding to a field expressed as string slice
+func toColumnName(s []string) string {
+return db.Sanitize(smartJoin(s))
+}
// toUnstructuredList turns a slice of unstructured objects into an unstructured.UnstructuredList

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -18,9 +18,9 @@ import (
"k8s.io/client-go/tools/cache" "k8s.io/client-go/tools/cache"
"sigs.k8s.io/controller-runtime/pkg/envtest" "sigs.k8s.io/controller-runtime/pkg/envtest"
"github.com/rancher/steve/pkg/sqlcache/informer"
"github.com/rancher/steve/pkg/sqlcache/informer/factory" "github.com/rancher/steve/pkg/sqlcache/informer/factory"
"github.com/rancher/steve/pkg/sqlcache/partition" "github.com/rancher/steve/pkg/sqlcache/partition"
"github.com/rancher/steve/pkg/sqlcache/sqltypes"
) )
const testNamespace = "sql-test" const testNamespace = "sql-test"
@ -107,8 +107,8 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
err = i.waitForCacheReady(configMapNames, testNamespace, cache) err = i.waitForCacheReady(configMapNames, testNamespace, cache)
require.NoError(err) require.NoError(err)
orFiltersForFilters := func(filters ...informer.Filter) []informer.OrFilter { orFiltersForFilters := func(filters ...sqltypes.Filter) []sqltypes.OrFilter {
return []informer.OrFilter{ return []sqltypes.OrFilter{
{ {
Filters: filters, Filters: filters,
}, },
@ -116,85 +116,85 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
} }
tests := []struct { tests := []struct {
name string name string
filters []informer.OrFilter filters []sqltypes.OrFilter
wantNames []string wantNames []string
}{ }{
{ {
name: "matches filter", name: "matches filter",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}), }),
wantNames: []string{"matches-filter"}, wantNames: []string{"matches-filter"},
}, },
{ {
name: "partial matches filter", name: "partial matches filter",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}), }),
wantNames: []string{"matches-filter", "partial-matches"}, wantNames: []string{"matches-filter", "partial-matches"},
}, },
{ {
name: "no matches for filter with underscore as it is interpreted literally", name: "no matches for filter with underscore as it is interpreted literally",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalu_"}, Matches: []string{"somevalu_"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}), }),
wantNames: nil, wantNames: nil,
}, },
{ {
name: "no matches for filter with percent sign as it is interpreted literally", name: "no matches for filter with percent sign as it is interpreted literally",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalu%"}, Matches: []string{"somevalu%"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}), }),
wantNames: nil, wantNames: nil,
}, },
{ {
name: "match with special characters", name: "match with special characters",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"c%%l_value"}, Matches: []string{"c%%l_value"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}), }),
wantNames: []string{"special-character-matches"}, wantNames: []string{"special-character-matches"},
}, },
{ {
name: "match with literal backslash character", name: "match with literal backslash character",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{`my\windows\path`}, Matches: []string{`my\windows\path`},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}), }),
wantNames: []string{"backslash-character-matches"}, wantNames: []string{"backslash-character-matches"},
}, },
{ {
name: "not eq filter", name: "not eq filter",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.NotEq, Op: sqltypes.NotEq,
Partial: false, Partial: false,
}), }),
wantNames: []string{"partial-matches", "not-matches-filter", "missing", "special-character-matches", "backslash-character-matches"}, wantNames: []string{"partial-matches", "not-matches-filter", "missing", "special-character-matches", "backslash-character-matches"},
}, },
{ {
name: "partial not eq filter", name: "partial not eq filter",
filters: orFiltersForFilters(informer.Filter{ filters: orFiltersForFilters(sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.NotEq, Op: sqltypes.NotEq,
Partial: true, Partial: true,
}), }),
wantNames: []string{"not-matches-filter", "missing", "special-character-matches", "backslash-character-matches"}, wantNames: []string{"not-matches-filter", "missing", "special-character-matches", "backslash-character-matches"},
@ -202,16 +202,16 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
{ {
name: "multiple or filters match", name: "multiple or filters match",
filters: orFiltersForFilters( filters: orFiltersForFilters(
informer.Filter{ sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
informer.Filter{ sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"notequal"}, Matches: []string{"notequal"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
), ),
@ -220,16 +220,16 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
{ {
name: "or filters on different fields", name: "or filters on different fields",
filters: orFiltersForFilters( filters: orFiltersForFilters(
informer.Filter{ sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
informer.Filter{ sqltypes.Filter{
Field: []string{`metadata`, `name`}, Field: []string{`metadata`, `name`},
Matches: []string{"missing"}, Matches: []string{"missing"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
), ),
@ -237,23 +237,23 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
}, },
{ {
name: "and filters, both must match", name: "and filters, both must match",
filters: []informer.OrFilter{ filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"somevalue"}, Matches: []string{"somevalue"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
}, },
}, },
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{`metadata`, `name`}, Field: []string{`metadata`, `name`},
Matches: []string{"matches-filter"}, Matches: []string{"matches-filter"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
@ -264,10 +264,10 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
{ {
name: "no matches", name: "no matches",
filters: orFiltersForFilters( filters: orFiltersForFilters(
informer.Filter{ sqltypes.Filter{
Field: []string{"metadata", "annotations", "somekey"}, Field: []string{"metadata", "annotations", "somekey"},
Matches: []string{"valueNotRepresented"}, Matches: []string{"valueNotRepresented"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
), ),
@ -278,13 +278,13 @@ func (i *IntegrationSuite) TestSQLCacheFilters() {
for _, test := range tests { for _, test := range tests {
test := test test := test
i.Run(test.name, func() { i.Run(test.name, func() {
options := informer.ListOptions{ options := sqltypes.ListOptions{
Filters: test.filters, Filters: test.filters,
} }
partitions := []partition.Partition{defaultPartition} partitions := []partition.Partition{defaultPartition}
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel() defer cancel()
cfgMaps, total, continueToken, err := cache.ListByOptions(ctx, options, partitions, testNamespace) cfgMaps, total, continueToken, err := cache.ListByOptions(ctx, &options, partitions, testNamespace)
i.Require().NoError(err) i.Require().NoError(err)
// since there's no additional pages, the continue token should be empty // since there's no additional pages, the continue token should be empty
i.Require().Equal("", continueToken) i.Require().Equal("", continueToken)
@ -334,11 +334,11 @@ func (i *IntegrationSuite) waitForCacheReady(readyResourceNames []string, namesp
ctx, cancel := context.WithTimeout(context.Background(), time.Second*30) ctx, cancel := context.WithTimeout(context.Background(), time.Second*30)
defer cancel() defer cancel()
return wait.PollUntilContextCancel(ctx, time.Millisecond*100, true, func(ctx context.Context) (done bool, err error) { return wait.PollUntilContextCancel(ctx, time.Millisecond*100, true, func(ctx context.Context) (done bool, err error) {
var options informer.ListOptions var options sqltypes.ListOptions
partitions := []partition.Partition{defaultPartition} partitions := []partition.Partition{defaultPartition}
cacheCtx, cacheCancel := context.WithTimeout(ctx, time.Second*5) cacheCtx, cacheCancel := context.WithTimeout(ctx, time.Second*5)
defer cacheCancel() defer cacheCancel()
currentResources, total, _, err := cache.ListByOptions(cacheCtx, options, partitions, namespace) currentResources, total, _, err := cache.ListByOptions(cacheCtx, &options, partitions, namespace)
if err != nil { if err != nil {
// note that we don't return the error since that would stop the polling // note that we don't return the error since that would stop the polling
return false, nil return false, nil

View File

@@ -1,4 +1,4 @@
-package informer
+package sqltypes
type Op string
@@ -25,25 +25,27 @@ const (
// ListOptions represents the query parameters that may be included in a list request.
type ListOptions struct {
-ChunkSize int
-Resume string
-Filters []OrFilter
-Sort Sort
-Pagination Pagination
+ChunkSize int `json:"chunkSize"`
+Resume string `json:"resume"`
+Filters []OrFilter `json:"orFilters"`
+SortList SortList `json:"sortList"`
+Pagination Pagination `json:"pagination"`
}
// Filter represents a field to filter by.
// A subfield in an object is represented in a request query using . notation, e.g. 'metadata.name'.
// The subfield is internally represented as a slice, e.g. [metadata, name].
-// Complex subfields need to be expressed with square brackets, as in `metadata.labels[zombo.com/moose]`,
-// but are mapped to the string slice ["metadata", "labels", "zombo.com/moose"]
+// Complex subfields need to be expressed with square brackets, as in `metadata.labels[example.com/moose]`,
+// but are mapped to the string slice ["metadata", "labels", "example.com/moose"]
//
// If more than one value is given for the `Match` field, we do an "IN (<values>)" test
type Filter struct {
-Field []string
-Matches []string
-Op Op
-Partial bool
+Field []string `json:"fields"`
+Matches []string `json:"matches"`
+Op Op `json:"op"`
+Partial bool `json:"partial"`
+IsIndirect bool `json:"isIndirect"`
+IndirectFields []string `json:"indirectFields"`
}
// OrFilter represents a set of possible fields to filter by, where an item may match any filter in the set to be included in the result.
@@ -57,12 +59,24 @@ type OrFilter struct {
// The order is represented by prefixing the sort key by '-', e.g. sort=-metadata.name.
// e.g. To sort internal clusters first followed by clusters in alpha order: sort=-spec.internal,spec.displayName
type Sort struct {
-Fields [][]string
-Orders []SortOrder
+Fields []string `json:"fields"`
+Order SortOrder `json:"order"`
+IsIndirect bool `json:"isIndirect"`
+IndirectFields []string `json:"indirectFields"`
}
+type SortList struct {
+SortDirectives []Sort `json:"sortDirectives"`
+}
// Pagination represents how to return paginated results.
type Pagination struct {
-PageSize int
-Page int
+PageSize int `json:"pageSize"`
+Page int `json:"page"`
}
+func NewSortList() *SortList {
+return &SortList{
+SortDirectives: []Sort{},
+}
+}
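To make the new shapes concrete, here is a hedged sketch (not part of this commit; field and label names are illustrative) of building a `ListOptions` with the new `SortList` in place of the old primary/secondary `Sort` pair:

```go
package main

import (
	"fmt"

	"github.com/rancher/steve/pkg/sqlcache/sqltypes"
)

// exampleListOptions roughly corresponds to a request like
// filter=metadata.labels[field.cattle.io/projectId]=p-example&sort=-metadata.namespace,metadata.name&pagesize=50
func exampleListOptions() *sqltypes.ListOptions {
	return &sqltypes.ListOptions{
		Filters: []sqltypes.OrFilter{
			{
				// Filters within one OrFilter are OR'd together;
				// separate OrFilter entries are AND'd.
				Filters: []sqltypes.Filter{
					{
						Field:   []string{"metadata", "labels", "field.cattle.io/projectId"},
						Matches: []string{"p-example"},
						Op:      sqltypes.Eq,
						Partial: false,
					},
				},
			},
		},
		SortList: sqltypes.SortList{
			SortDirectives: []sqltypes.Sort{
				{Fields: []string{"metadata", "namespace"}, Order: sqltypes.DESC},
				{Fields: []string{"metadata", "name"}, Order: sqltypes.ASC},
			},
		},
		Pagination: sqltypes.Pagination{PageSize: 50, Page: 1},
	}
}

func main() {
	fmt.Printf("%+v\n", exampleListOptions())
}
```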

View File

@ -3,6 +3,7 @@ package listprocessor
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"regexp" "regexp"
"strconv" "strconv"
@ -10,8 +11,8 @@ import (
"github.com/rancher/apiserver/pkg/apierror" "github.com/rancher/apiserver/pkg/apierror"
"github.com/rancher/apiserver/pkg/types" "github.com/rancher/apiserver/pkg/types"
"github.com/rancher/steve/pkg/sqlcache/informer"
"github.com/rancher/steve/pkg/sqlcache/partition" "github.com/rancher/steve/pkg/sqlcache/partition"
"github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/rancher/steve/pkg/stores/queryhelper" "github.com/rancher/steve/pkg/stores/queryhelper"
"github.com/rancher/steve/pkg/stores/sqlpartition/queryparser" "github.com/rancher/steve/pkg/stores/sqlpartition/queryparser"
"github.com/rancher/steve/pkg/stores/sqlpartition/selection" "github.com/rancher/steve/pkg/stores/sqlpartition/selection"
@ -36,27 +37,18 @@ const (
) )
var endsWithBracket = regexp.MustCompile(`^(.+)\[(.+)]$`) var endsWithBracket = regexp.MustCompile(`^(.+)\[(.+)]$`)
var mapK8sOpToRancherOp = map[selection.Operator]informer.Op{ var mapK8sOpToRancherOp = map[selection.Operator]sqltypes.Op{
selection.Equals: informer.Eq, selection.Equals: sqltypes.Eq,
selection.DoubleEquals: informer.Eq, selection.DoubleEquals: sqltypes.Eq,
selection.PartialEquals: informer.Eq, selection.PartialEquals: sqltypes.Eq,
selection.NotEquals: informer.NotEq, selection.NotEquals: sqltypes.NotEq,
selection.NotPartialEquals: informer.NotEq, selection.NotPartialEquals: sqltypes.NotEq,
selection.In: informer.In, selection.In: sqltypes.In,
selection.NotIn: informer.NotIn, selection.NotIn: sqltypes.NotIn,
selection.Exists: informer.Exists, selection.Exists: sqltypes.Exists,
selection.DoesNotExist: informer.NotExists, selection.DoesNotExist: sqltypes.NotExists,
selection.LessThan: informer.Lt, selection.LessThan: sqltypes.Lt,
selection.GreaterThan: informer.Gt, selection.GreaterThan: sqltypes.Gt,
}
// ListOptions represents the query parameters that may be included in a list request.
type ListOptions struct {
ChunkSize int
Resume string
Filters []informer.OrFilter
Sort informer.Sort
Pagination informer.Pagination
} }
type Cache interface { type Cache interface {
@ -66,10 +58,10 @@ type Cache interface {
// - the total number of resources (returned list might be a subset depending on pagination options in lo) // - the total number of resources (returned list might be a subset depending on pagination options in lo)
// - a continue token, if there are more pages after the returned one // - a continue token, if there are more pages after the returned one
// - an error instead of all of the above if anything went wrong // - an error instead of all of the above if anything went wrong
ListByOptions(ctx context.Context, lo informer.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error) ListByOptions(ctx context.Context, lo *sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
} }
func k8sOpToRancherOp(k8sOp selection.Operator) (informer.Op, bool, error) { func k8sOpToRancherOp(k8sOp selection.Operator) (sqltypes.Op, bool, error) {
v, ok := mapK8sOpToRancherOp[k8sOp] v, ok := mapK8sOpToRancherOp[k8sOp]
if ok { if ok {
return v, k8sOp == selection.PartialEquals || k8sOp == selection.NotPartialEquals, nil return v, k8sOp == selection.PartialEquals || k8sOp == selection.NotPartialEquals, nil
@ -77,21 +69,24 @@ func k8sOpToRancherOp(k8sOp selection.Operator) (informer.Op, bool, error) {
return "", false, fmt.Errorf("unknown k8sOp: %s", k8sOp) return "", false, fmt.Errorf("unknown k8sOp: %s", k8sOp)
} }
func k8sRequirementToOrFilter(requirement queryparser.Requirement) (informer.Filter, error) { func k8sRequirementToOrFilter(requirement queryparser.Requirement) (sqltypes.Filter, error) {
values := requirement.Values() values := requirement.Values()
queryFields := splitQuery(requirement.Key()) queryFields := splitQuery(requirement.Key())
op, usePartialMatch, err := k8sOpToRancherOp(requirement.Operator()) op, usePartialMatch, err := k8sOpToRancherOp(requirement.Operator())
return informer.Filter{ isIndirect, indirectFields := requirement.IndirectInfo()
Field: queryFields, return sqltypes.Filter{
Matches: values, Field: queryFields,
Op: op, Matches: values,
Partial: usePartialMatch, Op: op,
Partial: usePartialMatch,
IsIndirect: isIndirect,
IndirectFields: indirectFields,
}, err }, err
} }
// ParseQuery parses the query params of a request and returns a ListOptions. // ParseQuery parses the query params of a request and returns a ListOptions.
func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOptions, error) { func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (sqltypes.ListOptions, error) {
opts := informer.ListOptions{} opts := sqltypes.ListOptions{}
opts.ChunkSize = getLimit(apiOp) opts.ChunkSize = getLimit(apiOp)
@ -100,13 +95,13 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
opts.Resume = cont opts.Resume = cont
filterParams := q[filterParam] filterParams := q[filterParam]
filterOpts := []informer.OrFilter{} filterOpts := []sqltypes.OrFilter{}
for _, filters := range filterParams { for _, filters := range filterParams {
requirements, err := queryparser.ParseToRequirements(filters) requirements, err := queryparser.ParseToRequirements(filters, filterParam)
if err != nil { if err != nil {
return informer.ListOptions{}, err return sqltypes.ListOptions{}, err
} }
orFilter := informer.OrFilter{} orFilter := sqltypes.OrFilter{}
for _, requirement := range requirements { for _, requirement := range requirements {
filter, err := k8sRequirementToOrFilter(requirement) filter, err := k8sRequirementToOrFilter(requirement)
if err != nil { if err != nil {
@ -118,29 +113,43 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
} }
opts.Filters = filterOpts opts.Filters = filterOpts
sortOpts := informer.Sort{} if q.Has(sortParam) {
sortKeys := q.Get(sortParam) sortKeys := q.Get(sortParam)
if sortKeys != "" { filterRequirements, err := queryparser.ParseToRequirements(sortKeys, sortParam)
sortParts := strings.Split(sortKeys, ",") if err != nil {
for _, sortPart := range sortParts { return opts, err
field := sortPart
if len(field) > 0 {
sortOrder := informer.ASC
if field[0] == '-' {
sortOrder = informer.DESC
field = field[1:]
}
if len(field) > 0 {
sortOpts.Fields = append(sortOpts.Fields, queryhelper.SafeSplit(field))
sortOpts.Orders = append(sortOpts.Orders, sortOrder)
}
}
} }
if len(filterRequirements) == 0 {
if len(sortKeys) == 0 {
return opts, errors.New("invalid sort key: <empty string>")
}
return opts, fmt.Errorf("invalid sort key: '%s'", sortKeys)
}
sortList := *sqltypes.NewSortList()
for _, requirement := range filterRequirements {
if requirement.Operator() != selection.Exists {
return opts, fmt.Errorf("sort directive %s can't contain operator (%s)", sortKeys, requirement.Operator())
}
key := requirement.Key()
order := sqltypes.ASC
if key[0] == '-' {
order = sqltypes.DESC
key = key[1:]
}
isIndirect, indirectFields := requirement.IndirectInfo()
sortDirective := sqltypes.Sort{
Fields: queryhelper.SafeSplit(key),
Order: order,
IsIndirect: isIndirect,
IndirectFields: indirectFields,
}
sortList.SortDirectives = append(sortList.SortDirectives, sortDirective)
}
opts.SortList = sortList
} }
opts.Sort = sortOpts
var err error var err error
pagination := informer.Pagination{} pagination := sqltypes.Pagination{}
pagination.PageSize, err = strconv.Atoi(q.Get(pageSizeParam)) pagination.PageSize, err = strconv.Atoi(q.Get(pageSizeParam))
if err != nil { if err != nil {
pagination.PageSize = 0 pagination.PageSize = 0
@ -151,12 +160,12 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
} }
opts.Pagination = pagination opts.Pagination = pagination
op := informer.Eq op := sqltypes.Eq
projectsOrNamespaces := q.Get(projectsOrNamespacesVar) projectsOrNamespaces := q.Get(projectsOrNamespacesVar)
if projectsOrNamespaces == "" { if projectsOrNamespaces == "" {
projectsOrNamespaces = q.Get(projectsOrNamespacesVar + notOp) projectsOrNamespaces = q.Get(projectsOrNamespacesVar + notOp)
if projectsOrNamespaces != "" { if projectsOrNamespaces != "" {
op = informer.NotEq op = sqltypes.NotEq
} }
} }
if projectsOrNamespaces != "" { if projectsOrNamespaces != "" {
@ -167,12 +176,12 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
if projOrNSFilters == nil { if projOrNSFilters == nil {
return opts, apierror.NewAPIError(validation.NotFound, fmt.Sprintf("could not find any namespaces named [%s] or namespaces belonging to project named [%s]", projectsOrNamespaces, projectsOrNamespaces)) return opts, apierror.NewAPIError(validation.NotFound, fmt.Sprintf("could not find any namespaces named [%s] or namespaces belonging to project named [%s]", projectsOrNamespaces, projectsOrNamespaces))
} }
if op == informer.NotEq { if op == sqltypes.NotEq {
for _, filter := range projOrNSFilters { for _, filter := range projOrNSFilters {
opts.Filters = append(opts.Filters, informer.OrFilter{Filters: []informer.Filter{filter}}) opts.Filters = append(opts.Filters, sqltypes.OrFilter{Filters: []sqltypes.Filter{filter}})
} }
} else { } else {
opts.Filters = append(opts.Filters, informer.OrFilter{Filters: projOrNSFilters}) opts.Filters = append(opts.Filters, sqltypes.OrFilter{Filters: projOrNSFilters})
} }
} }
@ -205,22 +214,22 @@ func splitQuery(query string) []string {
return strings.Split(query, ".") return strings.Split(query, ".")
} }
func parseNamespaceOrProjectFilters(ctx context.Context, projOrNS string, op informer.Op, namespaceInformer Cache) ([]informer.Filter, error) { func parseNamespaceOrProjectFilters(ctx context.Context, projOrNS string, op sqltypes.Op, namespaceInformer Cache) ([]sqltypes.Filter, error) {
var filters []informer.Filter var filters []sqltypes.Filter
for _, pn := range strings.Split(projOrNS, ",") { for _, pn := range strings.Split(projOrNS, ",") {
uList, _, _, err := namespaceInformer.ListByOptions(ctx, informer.ListOptions{ uList, _, _, err := namespaceInformer.ListByOptions(ctx, &sqltypes.ListOptions{
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "name"}, Field: []string{"metadata", "name"},
Matches: []string{pn}, Matches: []string{pn},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
{ {
Field: []string{"metadata", "labels", "field.cattle.io/projectId"}, Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{pn}, Matches: []string{pn},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
}, },
}, },
@ -230,7 +239,7 @@ func parseNamespaceOrProjectFilters(ctx context.Context, projOrNS string, op inf
return filters, err return filters, err
} }
for _, item := range uList.Items { for _, item := range uList.Items {
filters = append(filters, informer.Filter{ filters = append(filters, sqltypes.Filter{
Field: []string{"metadata", "namespace"}, Field: []string{"metadata", "namespace"},
Matches: []string{item.GetName()}, Matches: []string{item.GetName()},
Op: op, Op: op,

View File

@ -8,8 +8,8 @@ import (
"testing" "testing"
"github.com/rancher/apiserver/pkg/types" "github.com/rancher/apiserver/pkg/types"
"github.com/rancher/steve/pkg/sqlcache/informer"
"github.com/rancher/steve/pkg/sqlcache/partition" "github.com/rancher/steve/pkg/sqlcache/partition"
"github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"go.uber.org/mock/gomock" "go.uber.org/mock/gomock"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
@ -23,7 +23,7 @@ func TestParseQuery(t *testing.T) {
setupNSCache func() Cache setupNSCache func() Cache
nsc Cache nsc Cache
req *types.APIRequest req *types.APIRequest
expectedLO informer.ListOptions expectedLO sqltypes.ListOptions
errExpected bool errExpected bool
errorText string errorText string
} }
@ -35,10 +35,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: ""}, URL: &url.URL{RawQuery: ""},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -51,21 +51,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "projectsornamespaces=somethin"}, URL: &url.URL{RawQuery: "projectsornamespaces=somethin"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "namespace"}, Field: []string{"metadata", "namespace"},
Matches: []string{"ns1"}, Matches: []string{"ns1"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -82,19 +82,19 @@ func TestParseQuery(t *testing.T) {
}, },
} }
nsc := NewMockCache(gomock.NewController(t)) nsc := NewMockCache(gomock.NewController(t))
nsc.EXPECT().ListByOptions(context.Background(), informer.ListOptions{ nsc.EXPECT().ListByOptions(context.Background(), &sqltypes.ListOptions{
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "name"}, Field: []string{"metadata", "name"},
Matches: []string{"somethin"}, Matches: []string{"somethin"},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
{ {
Field: []string{"metadata", "labels", "field.cattle.io/projectId"}, Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{"somethin"}, Matches: []string{"somethin"},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
}, },
}, },
@ -111,40 +111,40 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "projectsornamespaces=somethin"}, URL: &url.URL{RawQuery: "projectsornamespaces=somethin"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "namespace"}, Field: []string{"metadata", "namespace"},
Matches: []string{"ns1"}, Matches: []string{"ns1"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
errExpected: true, errExpected: true,
setupNSCache: func() Cache { setupNSCache: func() Cache {
nsi := NewMockCache(gomock.NewController(t)) nsi := NewMockCache(gomock.NewController(t))
nsi.EXPECT().ListByOptions(context.Background(), informer.ListOptions{ nsi.EXPECT().ListByOptions(context.Background(), &sqltypes.ListOptions{
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "name"}, Field: []string{"metadata", "name"},
Matches: []string{"somethin"}, Matches: []string{"somethin"},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
{ {
Field: []string{"metadata", "labels", "field.cattle.io/projectId"}, Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{"somethin"}, Matches: []string{"somethin"},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
}, },
}, },
@ -161,21 +161,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "projectsornamespaces=somethin"}, URL: &url.URL{RawQuery: "projectsornamespaces=somethin"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "namespace"}, Field: []string{"metadata", "namespace"},
Matches: []string{"ns1"}, Matches: []string{"ns1"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -185,19 +185,19 @@ func TestParseQuery(t *testing.T) {
Items: []unstructured.Unstructured{}, Items: []unstructured.Unstructured{},
} }
nsi := NewMockCache(gomock.NewController(t)) nsi := NewMockCache(gomock.NewController(t))
nsi.EXPECT().ListByOptions(context.Background(), informer.ListOptions{ nsi.EXPECT().ListByOptions(context.Background(), &sqltypes.ListOptions{
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "name"}, Field: []string{"metadata", "name"},
Matches: []string{"somethin"}, Matches: []string{"somethin"},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
{ {
Field: []string{"metadata", "labels", "field.cattle.io/projectId"}, Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{"somethin"}, Matches: []string{"somethin"},
Op: informer.Eq, Op: sqltypes.Eq,
}, },
}, },
}, },
@ -213,21 +213,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a~c"}, URL: &url.URL{RawQuery: "filter=a~c"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a"}, Field: []string{"a"},
Matches: []string{"c"}, Matches: []string{"c"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -239,21 +239,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a=c"}, URL: &url.URL{RawQuery: "filter=a=c"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a"}, Field: []string{"a"},
Matches: []string{"c"}, Matches: []string{"c"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -274,21 +274,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]~heads"}, URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]~heads"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "labels", "grover.example.com/fish"}, Field: []string{"metadata", "labels", "grover.example.com/fish"},
Matches: []string{"heads"}, Matches: []string{"heads"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -300,21 +300,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.annotations[chumley.example.com/fish]=seals"}, URL: &url.URL{RawQuery: "filter=metadata.annotations[chumley.example.com/fish]=seals"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "annotations", "chumley.example.com/fish"}, Field: []string{"metadata", "annotations", "chumley.example.com/fish"},
Matches: []string{"seals"}, Matches: []string{"seals"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -326,20 +326,20 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.fields[3]<5"}, URL: &url.URL{RawQuery: "filter=metadata.fields[3]<5"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "fields", "3"}, Field: []string{"metadata", "fields", "3"},
Matches: []string{"5"}, Matches: []string{"5"},
Op: informer.Lt, Op: sqltypes.Lt,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -351,21 +351,48 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]~heads"}, URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]~heads"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "labels", "grover.example.com/fish"}, Field: []string{"metadata", "labels", "grover.example.com/fish"},
Matches: []string{"heads"}, Matches: []string{"heads"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1,
},
},
})
tests = append(tests, testCase{
description: "ParseQuery() with an indirect labels filter param should create an indirect labels-specific filter.",
req: &types.APIRequest{
Request: &http.Request{
URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]=>[_v1][Foods][foodCode][country]=japan"},
},
},
expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
Filters: []sqltypes.OrFilter{
{
Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "labels", "grover.example.com/fish"},
Matches: []string{"japan"},
Op: sqltypes.Eq,
IsIndirect: true,
IndirectFields: []string{"_v1", "Foods", "foodCode", "country"},
},
},
},
},
Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -377,31 +404,31 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a=c&filter=b=d"}, URL: &url.URL{RawQuery: "filter=a=c&filter=b=d"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a"}, Field: []string{"a"},
Matches: []string{"c"}, Matches: []string{"c"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"b"}, Field: []string{"b"},
Matches: []string{"d"}, Matches: []string{"d"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -413,31 +440,31 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a=c&filter=b=d"}, URL: &url.URL{RawQuery: "filter=a=c&filter=b=d"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a"}, Field: []string{"a"},
Matches: []string{"c"}, Matches: []string{"c"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"b"}, Field: []string{"b"},
Matches: []string{"d"}, Matches: []string{"d"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -449,27 +476,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=beer=pabst,metadata.labels[beer2.io/ale] ~schlitz"}, URL: &url.URL{RawQuery: "filter=beer=pabst,metadata.labels[beer2.io/ale] ~schlitz"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"beer"}, Field: []string{"beer"},
Matches: []string{"pabst"}, Matches: []string{"pabst"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
{ {
Field: []string{"metadata", "labels", "beer2.io/ale"}, Field: []string{"metadata", "labels", "beer2.io/ale"},
Matches: []string{"schlitz"}, Matches: []string{"schlitz"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -481,27 +508,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=beer=natty-bo,metadata.labels.beer3~rainier"}, URL: &url.URL{RawQuery: "filter=beer=natty-bo,metadata.labels.beer3~rainier"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"beer"}, Field: []string{"beer"},
Matches: []string{"natty-bo"}, Matches: []string{"natty-bo"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: false, Partial: false,
}, },
{ {
Field: []string{"metadata", "labels", "beer3"}, Field: []string{"metadata", "labels", "beer3"},
Matches: []string{"rainier"}, Matches: []string{"rainier"},
Op: informer.Eq, Op: sqltypes.Eq,
Partial: true, Partial: true,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -513,27 +540,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a1In in (x1),a2In IN (x2)"}, URL: &url.URL{RawQuery: "filter=a1In in (x1),a2In IN (x2)"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a1In"}, Field: []string{"a1In"},
Matches: []string{"x1"}, Matches: []string{"x1"},
Op: informer.In, Op: sqltypes.In,
Partial: false, Partial: false,
}, },
{ {
Field: []string{"a2In"}, Field: []string{"a2In"},
Matches: []string{"x2"}, Matches: []string{"x2"},
Op: informer.In, Op: sqltypes.In,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -545,21 +572,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a2In in (x2a, x2b)"}, URL: &url.URL{RawQuery: "filter=a2In in (x2a, x2b)"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a2In"}, Field: []string{"a2In"},
Matches: []string{"x2a", "x2b"}, Matches: []string{"x2a", "x2b"},
Op: informer.In, Op: sqltypes.In,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -571,27 +598,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a1NotIn notin (x1),a2NotIn NOTIN (x2)"}, URL: &url.URL{RawQuery: "filter=a1NotIn notin (x1),a2NotIn NOTIN (x2)"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a1NotIn"}, Field: []string{"a1NotIn"},
Matches: []string{"x1"}, Matches: []string{"x1"},
Op: informer.NotIn, Op: sqltypes.NotIn,
Partial: false, Partial: false,
}, },
{ {
Field: []string{"a2NotIn"}, Field: []string{"a2NotIn"},
Matches: []string{"x2"}, Matches: []string{"x2"},
Op: informer.NotIn, Op: sqltypes.NotIn,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -603,21 +630,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a3NotIn in (x3a, x3b)"}, URL: &url.URL{RawQuery: "filter=a3NotIn in (x3a, x3b)"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a3NotIn"}, Field: []string{"a3NotIn"},
Matches: []string{"x3a", "x3b"}, Matches: []string{"x3a", "x3b"},
Op: informer.In, Op: sqltypes.In,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -629,27 +656,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a4In iN (x4a),a4NotIn nOtIn (x4b)"}, URL: &url.URL{RawQuery: "filter=a4In iN (x4a),a4NotIn nOtIn (x4b)"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a4In"}, Field: []string{"a4In"},
Matches: []string{"x4a"}, Matches: []string{"x4a"},
Op: informer.In, Op: sqltypes.In,
Partial: false, Partial: false,
}, },
{ {
Field: []string{"a4NotIn"}, Field: []string{"a4NotIn"},
Matches: []string{"x4b"}, Matches: []string{"x4b"},
Op: informer.NotIn, Op: sqltypes.NotIn,
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -671,33 +698,33 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.labels.a5In1,!metadata.labels.a5In2, ! metadata.labels.a5In3"}, URL: &url.URL{RawQuery: "filter=metadata.labels.a5In1,!metadata.labels.a5In2, ! metadata.labels.a5In3"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"metadata", "labels", "a5In1"}, Field: []string{"metadata", "labels", "a5In1"},
Op: informer.Exists, Op: sqltypes.Exists,
Matches: []string{}, Matches: []string{},
Partial: false, Partial: false,
}, },
{ {
Field: []string{"metadata", "labels", "a5In2"}, Field: []string{"metadata", "labels", "a5In2"},
Op: informer.NotExists, Op: sqltypes.NotExists,
Matches: []string{}, Matches: []string{},
Partial: false, Partial: false,
}, },
{ {
Field: []string{"metadata", "labels", "a5In3"}, Field: []string{"metadata", "labels", "a5In3"},
Op: informer.NotExists, Op: sqltypes.NotExists,
Matches: []string{}, Matches: []string{},
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -709,27 +736,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a<1,b>2"}, URL: &url.URL{RawQuery: "filter=a<1,b>2"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: []informer.OrFilter{ Filters: []sqltypes.OrFilter{
{ {
Filters: []informer.Filter{ Filters: []sqltypes.Filter{
{ {
Field: []string{"a"}, Field: []string{"a"},
Op: informer.Lt, Op: sqltypes.Lt,
Matches: []string{"1"}, Matches: []string{"1"},
Partial: false, Partial: false,
}, },
{ {
Field: []string{"b"}, Field: []string{"b"},
Op: informer.Gt, Op: sqltypes.Gt,
Matches: []string{"2"}, Matches: []string{"2"},
Partial: false, Partial: false,
}, },
}, },
}, },
}, },
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -742,15 +769,18 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=metadata.name"}, URL: &url.URL{RawQuery: "sort=metadata.name"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Sort: informer.Sort{ SortList: sqltypes.SortList{
Fields: [][]string{ SortDirectives: []sqltypes.Sort{
{"metadata", "name"}}, {
Orders: []informer.SortOrder{informer.ASC}, Fields: []string{"metadata", "name"},
Order: sqltypes.ASC,
},
},
}, },
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -763,14 +793,18 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=-metadata.name"}, URL: &url.URL{RawQuery: "sort=-metadata.name"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Sort: informer.Sort{ SortList: sqltypes.SortList{
Fields: [][]string{{"metadata", "name"}}, SortDirectives: []sqltypes.Sort{
Orders: []informer.SortOrder{informer.DESC}, {
Fields: []string{"metadata", "name"},
Order: sqltypes.DESC,
},
},
}, },
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -783,20 +817,22 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=-metadata.name,spec.something"}, URL: &url.URL{RawQuery: "sort=-metadata.name,spec.something"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Sort: informer.Sort{ SortList: sqltypes.SortList{
Fields: [][]string{ SortDirectives: []sqltypes.Sort{
{"metadata", "name"}, {
{"spec", "something"}, Fields: []string{"metadata", "name"},
}, Order: sqltypes.DESC,
Orders: []informer.SortOrder{ },
informer.DESC, {
informer.ASC, Fields: []string{"spec", "something"},
Order: sqltypes.ASC,
},
}, },
}, },
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -809,17 +845,30 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=-metadata.labels[beef.cattle.io/snort],metadata.labels.steer,metadata.labels[bossie.cattle.io/moo],spec.something"}, URL: &url.URL{RawQuery: "sort=-metadata.labels[beef.cattle.io/snort],metadata.labels.steer,metadata.labels[bossie.cattle.io/moo],spec.something"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Sort: informer.Sort{ SortList: sqltypes.SortList{
Fields: [][]string{{"metadata", "labels", "beef.cattle.io/snort"}, SortDirectives: []sqltypes.Sort{
{"metadata", "labels", "steer"}, {
{"metadata", "labels", "bossie.cattle.io/moo"}, Fields: []string{"metadata", "labels", "beef.cattle.io/snort"},
{"spec", "something"}}, Order: sqltypes.DESC,
Orders: []informer.SortOrder{informer.DESC, informer.ASC, informer.ASC, informer.ASC}, },
{
Fields: []string{"metadata", "labels", "steer"},
Order: sqltypes.ASC,
},
{
Fields: []string{"metadata", "labels", "bossie.cattle.io/moo"},
Order: sqltypes.ASC,
},
{
Fields: []string{"spec", "something"},
Order: sqltypes.ASC,
},
},
}, },
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -835,11 +884,11 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "continue=5"}, URL: &url.URL{RawQuery: "continue=5"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Resume: "5", Resume: "5",
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -852,11 +901,11 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "continue=5"}, URL: &url.URL{RawQuery: "continue=5"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Resume: "5", Resume: "5",
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -869,10 +918,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "limit=3"}, URL: &url.URL{RawQuery: "limit=3"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: 3, ChunkSize: 3,
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 1, Page: 1,
}, },
}, },
@ -885,10 +934,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "page=3"}, URL: &url.URL{RawQuery: "page=3"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
Page: 3, Page: 3,
}, },
}, },
@ -901,10 +950,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "pagesize=20"}, URL: &url.URL{RawQuery: "pagesize=20"},
}, },
}, },
expectedLO: informer.ListOptions{ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit, ChunkSize: defaultLimit,
Filters: make([]informer.OrFilter, 0), Filters: make([]sqltypes.OrFilter, 0),
Pagination: informer.Pagination{ Pagination: sqltypes.Pagination{
PageSize: 20, PageSize: 20,
Page: 1, Page: 1,
}, },

View File

@ -13,8 +13,8 @@ import (
context "context" context "context"
reflect "reflect" reflect "reflect"
informer "github.com/rancher/steve/pkg/sqlcache/informer"
partition "github.com/rancher/steve/pkg/sqlcache/partition" partition "github.com/rancher/steve/pkg/sqlcache/partition"
sqltypes "github.com/rancher/steve/pkg/sqlcache/sqltypes"
gomock "go.uber.org/mock/gomock" gomock "go.uber.org/mock/gomock"
unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
) )
@ -43,7 +43,7 @@ func (m *MockCache) EXPECT() *MockCacheMockRecorder {
} }
// ListByOptions mocks base method. // ListByOptions mocks base method.
func (m *MockCache) ListByOptions(arg0 context.Context, arg1 informer.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) { func (m *MockCache) ListByOptions(arg0 context.Context, arg1 *sqltypes.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
m.ctrl.T.Helper() m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3) ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(*unstructured.UnstructuredList) ret0, _ := ret[0].(*unstructured.UnstructuredList)

View File

@ -39,6 +39,9 @@ the array into a sql statement. So the set gives us no benefit apart from removi
6. We allow `lt` and `gt` as aliases for `<` and `>`. 6. We allow `lt` and `gt` as aliases for `<` and `>`.
7. We added the '~' and '!~' operators to indicate partial match and non-match 7. We added the '~' and '!~' operators to indicate partial match and non-match
8. We added indirect field selection so we can base a filter or sort off a related value
(could be in a different table)
*/ */
package queryparser package queryparser
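To make the extended syntax concrete, here is a minimal illustrative sketch (not part of the diff) of selector strings this parser accepts after the change. The import path is an assumption based on the sibling `selection` package used in the tests; the `ParseToRequirements(selector, parseType, ...)` signature and the `Requirement` accessors (`Key`, `Operator`, `Values`, `IndirectInfo`) are the ones modified elsewhere in this PR.

```go
package main

import (
	"fmt"

	// Assumed import path, alongside .../sqlpartition/selection used by the tests.
	"github.com/rancher/steve/pkg/stores/sqlpartition/queryparser"
)

func main() {
	// Partial match ('~'), the 'lt'/'gt' aliases, and the new indirect selector ('=>').
	selectors := []string{
		"metadata.labels[beer2.io/ale] ~ schlitz",
		"metadata.fields[3] lt 5",
		"metadata.labels[k8s.io/meta-stuff] => [management.cattle.io/v3][tokens][id][metadata.state.name] = active",
	}
	for _, s := range selectors {
		// "filter" selects filter-style parsing; "sort" is stricter (no trailing operator).
		reqs, err := queryparser.ParseToRequirements(s, "filter")
		if err != nil {
			fmt.Println("parse error:", err)
			continue
		}
		for _, r := range reqs {
			isIndirect, indirectFields := r.IndirectInfo()
			fmt.Println(r.Key(), r.Operator(), r.Values(), isIndirect, indirectFields)
		}
	}
}
```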
@ -70,6 +73,7 @@ var (
string(selection.Equals), string(selection.DoubleEquals), string(selection.NotEquals), string(selection.Equals), string(selection.DoubleEquals), string(selection.NotEquals),
string(selection.PartialEquals), string(selection.NotPartialEquals), string(selection.PartialEquals), string(selection.NotPartialEquals),
string(selection.GreaterThan), string(selection.LessThan), string(selection.GreaterThan), string(selection.LessThan),
string(selection.IndirectSelector),
} }
validRequirementOperators = append(binaryOperators, unaryOperators...) validRequirementOperators = append(binaryOperators, unaryOperators...)
labelSelectorRegex = regexp.MustCompile(`^metadata.labels(?:\.\w[-a-zA-Z0-9_./]*|\[.*])$`) labelSelectorRegex = regexp.MustCompile(`^metadata.labels(?:\.\w[-a-zA-Z0-9_./]*|\[.*])$`)
@ -135,7 +139,9 @@ type Requirement struct {
// In huge majority of cases we have at most one value here. // In huge majority of cases we have at most one value here.
// It is generally faster to operate on a single-element slice // It is generally faster to operate on a single-element slice
// than on a single-element map, so we have a slice here. // than on a single-element map, so we have a slice here.
strValues []string strValues []string
isIndirect bool
indirectFields []string
} }
// NewRequirement is the constructor for a Requirement. // NewRequirement is the constructor for a Requirement.
@ -183,7 +189,26 @@ func NewRequirement(key string, op selection.Operator, vals []string, opts ...fi
default: default:
allErrs = append(allErrs, field.NotSupported(path.Child("operator"), op, validRequirementOperators)) allErrs = append(allErrs, field.NotSupported(path.Child("operator"), op, validRequirementOperators))
} }
return &Requirement{key: key, operator: op, strValues: vals}, allErrs.ToAggregate() agg := allErrs.ToAggregate()
var err error
if agg != nil {
err = errors.New(agg.Error())
}
return &Requirement{key: key, operator: op, strValues: vals}, err
}
func NewIndirectRequirement(key string, indirectFields []string, newOperator *selection.Operator, targetValues []string, opts ...field.PathOption) (*Requirement, error) {
if newOperator == nil {
operator := selection.Exists
newOperator = &operator
}
	r, err := NewRequirement(key, *newOperator, targetValues, opts...)
if err != nil {
return nil, err
}
r.isIndirect = true
r.indirectFields = indirectFields
return r, nil
} }
func (r *Requirement) hasValue(value string) bool { func (r *Requirement) hasValue(value string) bool {
@ -214,6 +239,10 @@ func (r *Requirement) Values() []string {
return ret.List() return ret.List()
} }
func (r *Requirement) IndirectInfo() (bool, []string) {
return r.isIndirect, r.indirectFields
}
// Equal checks the equality of requirement. // Equal checks the equality of requirement.
func (r Requirement) Equal(x Requirement) bool { func (r Requirement) Equal(x Requirement) bool {
if r.key != x.key { if r.key != x.key {
@ -377,6 +406,8 @@ const (
NotPartialEqualsToken NotPartialEqualsToken
// OpenParToken represents open parenthesis // OpenParToken represents open parenthesis
OpenParToken OpenParToken
// IndirectAccessToken is =>, used to associate one table with a related one, and grab a different field
IndirectAccessToken
) )
// string2token contains the mapping between lexer Token and token literal // string2token contains the mapping between lexer Token and token literal
@ -395,6 +426,7 @@ var string2token = map[string]Token{
"!~": NotPartialEqualsToken, "!~": NotPartialEqualsToken,
"notin": NotInToken, "notin": NotInToken,
"(": OpenParToken, "(": OpenParToken,
"=>": IndirectAccessToken,
} }
// ScannedItem contains the Token and the literal produced by the lexer. // ScannedItem contains the Token and the literal produced by the lexer.
@ -405,7 +437,7 @@ type ScannedItem struct {
func isIdentifierStartChar(ch byte) bool { func isIdentifierStartChar(ch byte) bool {
r := rune(ch) r := rune(ch)
return unicode.IsLetter(r) || unicode.IsDigit(r) || ch == '_' return unicode.IsLetter(r) || unicode.IsDigit(r) || ch == '_' || ch == '[' || ch == '-'
} }
// isWhitespace returns true if the rune is a space, tab, or newline. // isWhitespace returns true if the rune is a space, tab, or newline.
@ -531,6 +563,7 @@ type Parser struct {
l *Lexer l *Lexer
scannedItems []ScannedItem scannedItems []ScannedItem
position int position int
parseType string
path *field.Path path *field.Path
} }
@ -624,13 +657,18 @@ func (p *Parser) parseRequirement() (*Requirement, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
fieldPath := field.WithPath(p.path)
if operator == selection.Exists || operator == selection.DoesNotExist { // operator found lookahead set checked if operator == selection.Exists || operator == selection.DoesNotExist { // operator found lookahead set checked
if !labelSelectorRegex.MatchString(key) { if p.parseType == "filter" && !labelSelectorRegex.MatchString(key) {
return nil, fmt.Errorf("existence tests are valid only for labels; not valid for field '%s'", key) return nil, fmt.Errorf("existence tests are valid only for labels; not valid for field '%s'", key)
} }
return NewRequirement(key, operator, []string{}, field.WithPath(p.path)) return NewRequirement(key, operator, []string{}, fieldPath)
} }
operator, err = p.parseOperator() return p.parseOperatorAndValues(key, fieldPath, true)
}
func (p *Parser) parseOperatorAndValues(key string, fieldPath field.PathOption, allowIndirectSelector bool) (*Requirement, error) {
operator, err := p.parseOperator()
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -640,12 +678,22 @@ func (p *Parser) parseRequirement() (*Requirement, error) {
values, err = p.parseValues() values, err = p.parseValues()
case selection.Equals, selection.DoubleEquals, selection.NotEquals, selection.GreaterThan, selection.LessThan, selection.PartialEquals, selection.NotPartialEquals: case selection.Equals, selection.DoubleEquals, selection.NotEquals, selection.GreaterThan, selection.LessThan, selection.PartialEquals, selection.NotPartialEquals:
values, err = p.parseSingleValue() values, err = p.parseSingleValue()
case selection.IndirectSelector:
if !allowIndirectSelector {
return nil, fmt.Errorf("found a subsequent indirect selector (->)")
}
indirectFields, newOperator, targetValues, err := p.parseIndirectAccessorPart(key, fieldPath)
if err != nil {
return nil, err
} else if newOperator != nil && p.parseType == "sort" {
return nil, fmt.Errorf("found an operator (%s) in a sort expression )", *newOperator)
}
return NewIndirectRequirement(key, indirectFields, newOperator, targetValues.List(), fieldPath)
} }
if err != nil { if err != nil {
return nil, err return nil, err
} }
return NewRequirement(key, operator, values.List(), field.WithPath(p.path)) return NewRequirement(key, operator, values.List(), field.WithPath(p.path))
} }
// parseKeyAndInferOperator parses literals. // parseKeyAndInferOperator parses literals.
@ -694,11 +742,15 @@ func (p *Parser) parseOperator() (op selection.Operator, err error) {
op = selection.NotEquals op = selection.NotEquals
case NotPartialEqualsToken: case NotPartialEqualsToken:
op = selection.NotPartialEquals op = selection.NotPartialEquals
case IndirectAccessToken:
op = selection.IndirectSelector
default: default:
if lit == "lt" { if lit == "lt" {
op = selection.LessThan op = selection.LessThan
} else if lit == "gt" { } else if lit == "gt" {
op = selection.GreaterThan op = selection.GreaterThan
} else if p.parseType == "sort" {
return "", fmt.Errorf("found unexpected token '%s' in sort parameter", lit)
} else { } else {
return "", fmt.Errorf("found '%s', expected: %v", lit, strings.Join(binaryOperators, ", ")) return "", fmt.Errorf("found '%s', expected: %v", lit, strings.Join(binaryOperators, ", "))
} }
@ -727,10 +779,38 @@ func (p *Parser) parseValues() (sets.String, error) {
p.consume(Values) p.consume(Values)
return sets.NewString(""), nil return sets.NewString(""), nil
default: default:
return nil, fmt.Errorf("found '%s', expected: ',', ')' or identifier", lit) return sets.NewString(""), fmt.Errorf("found '%s', expected: ',', ')' or identifier", lit)
} }
} }
func (p *Parser) parseIndirectAccessorPart(key string, fieldPath field.PathOption) ([]string, *selection.Operator, sets.String, error) {
	// Returns the bracketed indirect-field accessors, the operator applied to the external field (nil when parsing a sort), and the target values.
values := sets.String{}
tok, lit := p.consume(Values)
if tok != IdentifierToken {
return nil, nil, values, fmt.Errorf("found '%s', expected: an indirect field specifier", lit)
}
matched, err := regexp.MatchString(`^(?:\[.*?\])+$`, lit)
if err != nil {
return nil, nil, values, err
} else if !matched {
return nil, nil, values, fmt.Errorf("found '%s', expected: a sequence of bracketed identifiers", lit)
}
indirectFields := strings.Split(lit[1:len(lit)-1], "][")
if len(indirectFields) != 4 {
return nil, nil, values, fmt.Errorf("found '%s', expected: a sequence of three bracketed identifiers", lit)
}
if p.parseType == "sort" {
return indirectFields, nil, sets.NewString(), nil
}
r, err := p.parseOperatorAndValues(key, fieldPath, false)
if err != nil {
return nil, nil, values, err
}
return indirectFields, &r.operator, sets.NewString(r.strValues...), nil
}
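For clarity, the bracket-splitting step above behaves like this standalone sketch (illustrative only, using one of the accessor literals from the tests):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// The accessor literal arrives as a single identifier token; trimming the
	// outer brackets and splitting on "][" yields exactly four fields.
	lit := "[management.cattle.io/v3][tokens][id][metadata.state.name]"
	fields := strings.Split(lit[1:len(lit)-1], "][")
	fmt.Println(len(fields), fields)
	// Output: 4 [management.cattle.io/v3 tokens id metadata.state.name]
}
```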
// parseIdentifiersList parses a (possibly empty) list of // parseIdentifiersList parses a (possibly empty) list of
// of comma separated (possibly empty) identifiers // of comma separated (possibly empty) identifiers
func (p *Parser) parseIdentifiersList() (sets.String, error) { func (p *Parser) parseIdentifiersList() (sets.String, error) {
@ -814,9 +894,9 @@ func (p *Parser) parseSingleValue() (sets.String, error) {
// 4. A requirement with just a KEY - as in "y" above - denotes that // 4. A requirement with just a KEY - as in "y" above - denotes that
// the KEY exists and can be any VALUE. // the KEY exists and can be any VALUE.
// 5. A requirement with just !KEY requires that the KEY not exist. // 5. A requirement with just !KEY requires that the KEY not exist.
func Parse(selector string, opts ...field.PathOption) (Selector, error) { func Parse(selector string, parseType string, opts ...field.PathOption) (Selector, error) {
pathThing := field.ToPath(opts...) pathThing := field.ToPath(opts...)
parsedSelector, err := parse(selector, pathThing) parsedSelector, err := parse(selector, parseType, pathThing)
if err == nil { if err == nil {
return parsedSelector, nil return parsedSelector, nil
} }
@ -827,8 +907,8 @@ func Parse(selector string, opts ...field.PathOption) (Selector, error) {
// The callers of this method can then decide how to return the internalSelector struct to their // The callers of this method can then decide how to return the internalSelector struct to their
// callers. This function has two callers now, one returns a Selector interface and the other // callers. This function has two callers now, one returns a Selector interface and the other
// returns a list of requirements. // returns a list of requirements.
func parse(selector string, path *field.Path) (internalSelector, error) { func parse(selector string, parseType string, path *field.Path) (internalSelector, error) {
p := &Parser{l: &Lexer{s: selector, pos: 0}, path: path} p := &Parser{l: &Lexer{s: selector, pos: 0}, parseType: parseType, path: path}
items, err := p.parse() items, err := p.parse()
if err != nil { if err != nil {
return nil, err return nil, err
@ -883,8 +963,8 @@ func SelectorFromValidatedSet(ls Set) Selector {
// processing on selector requirements. // processing on selector requirements.
// See the documentation for Parse() function for more details. // See the documentation for Parse() function for more details.
// TODO: Consider exporting the internalSelector type instead. // TODO: Consider exporting the internalSelector type instead.
func ParseToRequirements(selector string, opts ...field.PathOption) ([]Requirement, error) { func ParseToRequirements(selector string, parseType string, opts ...field.PathOption) ([]Requirement, error) {
return parse(selector, field.ToPath(opts...)) return parse(selector, parseType, field.ToPath(opts...))
} }
// ValidatedSetSelector wraps a Set, allowing it to implement the Selector interface. Unlike // ValidatedSetSelector wraps a Set, allowing it to implement the Selector interface. Unlike

View File

@ -27,15 +27,9 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/rancher/steve/pkg/stores/sqlpartition/selection" "github.com/rancher/steve/pkg/stores/sqlpartition/selection"
"github.com/stretchr/testify/assert"
"k8s.io/apimachinery/pkg/util/sets" "k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/validation/field"
)
var (
ignoreDetail = cmpopts.IgnoreFields(field.Error{}, "Detail")
) )
func TestSelectorParse(t *testing.T) { func TestSelectorParse(t *testing.T) {
@ -54,6 +48,9 @@ func TestSelectorParse(t *testing.T) {
"metadata.labels[im.here]", "metadata.labels[im.here]",
"!metadata.labels[im.not.here]", "!metadata.labels[im.not.here]",
"metadata.labels[k8s.io/meta-stuff] ~ has-dashes_underscores.dots.only", "metadata.labels[k8s.io/meta-stuff] ~ has-dashes_underscores.dots.only",
"metadata.labels[k8s.io/meta-stuff] => [management.cattle.io/v3][tokens][id][metadata.state.name] = active",
"name => [management.cattle.io/v3][tokens][id][metadata.state.name] = active",
"metadata.annotations[blah] => [management.cattle.io/v3][tokens][id][metadata.state.name] = active",
} }
testBadStrings := []string{ testBadStrings := []string{
"!no-label-absence-test", "!no-label-absence-test",
@ -77,15 +74,22 @@ func TestSelectorParse(t *testing.T) {
"!metadata.labels(im.not.here)", "!metadata.labels(im.not.here)",
`x="no double quotes allowed"`, `x="no double quotes allowed"`,
`x='no single quotes allowed'`, `x='no single quotes allowed'`,
"metadata.labels[k8s.io/meta-stuff] => not-bracketed = active",
"metadata.labels[k8s.io/meta-stuff] => [not][enough][accessors] = active",
"metadata.labels[k8s.io/meta-stuff] => [too][many][accessors][by][1] = active",
"metadata.labels[k8s.io/meta-stuff] => [missing][an][operator][end-of-string]",
"metadata.labels[k8s.io/meta-stuff] => [missing][an][operator][no-following-operator] no-operator",
"metadata.labels[k8s.io/meta-stuff] => [missing][a][post-operator][value] >",
"metadata.labels[not/followed/by/accessor] => = active",
} }
for _, test := range testGoodStrings { for _, test := range testGoodStrings {
_, err := Parse(test) _, err := Parse(test, "filter")
if err != nil { if err != nil {
t.Errorf("%v: error %v (%#v)\n", test, err, err) t.Errorf("%v: error %v (%#v)\n", test, err, err)
} }
} }
for _, test := range testBadStrings { for _, test := range testBadStrings {
_, err := Parse(test) _, err := Parse(test, "filter")
if err == nil { if err == nil {
t.Errorf("%v: did not get expected error\n", test) t.Errorf("%v: did not get expected error\n", test)
} }
@ -115,6 +119,7 @@ func TestLexer(t *testing.T) {
{"~", PartialEqualsToken}, {"~", PartialEqualsToken},
{"!~", NotPartialEqualsToken}, {"!~", NotPartialEqualsToken},
{"||", ErrorToken}, {"||", ErrorToken},
{"=>", IndirectAccessToken},
} }
for _, v := range testcases { for _, v := range testcases {
l := &Lexer{s: v.s, pos: 0} l := &Lexer{s: v.s, pos: 0}
@ -163,6 +168,9 @@ func TestLexerSequence(t *testing.T) {
{"key!~ value", []Token{IdentifierToken, NotPartialEqualsToken, IdentifierToken}}, {"key!~ value", []Token{IdentifierToken, NotPartialEqualsToken, IdentifierToken}},
{"key !~value", []Token{IdentifierToken, NotPartialEqualsToken, IdentifierToken}}, {"key !~value", []Token{IdentifierToken, NotPartialEqualsToken, IdentifierToken}},
{"key!~value", []Token{IdentifierToken, NotPartialEqualsToken, IdentifierToken}}, {"key!~value", []Token{IdentifierToken, NotPartialEqualsToken, IdentifierToken}},
{"metadata.labels[k8s.io/meta-stuff] => [management.cattle.io/v3][tokens][id][metadata.state.name] = active",
[]Token{IdentifierToken, IndirectAccessToken, IdentifierToken, EqualsToken, IdentifierToken},
},
} }
for _, v := range testcases { for _, v := range testcases {
var tokens []Token var tokens []Token
@ -203,6 +211,10 @@ func TestParserLookahead(t *testing.T) {
{"key gt 3", []Token{IdentifierToken, GreaterThanToken, IdentifierToken, EndOfStringToken}}, {"key gt 3", []Token{IdentifierToken, GreaterThanToken, IdentifierToken, EndOfStringToken}},
{"key lt 4", []Token{IdentifierToken, LessThanToken, IdentifierToken, EndOfStringToken}}, {"key lt 4", []Token{IdentifierToken, LessThanToken, IdentifierToken, EndOfStringToken}},
{`key = multi-word-string`, []Token{IdentifierToken, EqualsToken, QuotedStringToken, EndOfStringToken}}, {`key = multi-word-string`, []Token{IdentifierToken, EqualsToken, QuotedStringToken, EndOfStringToken}},
{"metadata.labels[k8s.io/meta-stuff] => [management.cattle.io/v3][tokens][id][metadata.state.name] = active",
[]Token{IdentifierToken, IndirectAccessToken, IdentifierToken, EqualsToken, IdentifierToken, EndOfStringToken},
},
} }
for _, v := range testcases { for _, v := range testcases {
p := &Parser{l: &Lexer{s: v.s, pos: 0}, position: 0} p := &Parser{l: &Lexer{s: v.s, pos: 0}, position: 0}
@ -240,6 +252,7 @@ func TestParseOperator(t *testing.T) {
{"notin", nil}, {"notin", nil},
{"!=", nil}, {"!=", nil},
{"!~", nil}, {"!~", nil},
{"=>", nil},
{"!", fmt.Errorf("found '%s', expected: %v", selection.DoesNotExist, strings.Join(binaryOperators, ", "))}, {"!", fmt.Errorf("found '%s', expected: %v", selection.DoesNotExist, strings.Join(binaryOperators, ", "))},
{"exists", fmt.Errorf("found '%s', expected: %v", selection.Exists, strings.Join(binaryOperators, ", "))}, {"exists", fmt.Errorf("found '%s', expected: %v", selection.Exists, strings.Join(binaryOperators, ", "))},
{"(", fmt.Errorf("found '%s', expected: %v", "(", strings.Join(binaryOperators, ", "))}, {"(", fmt.Errorf("found '%s', expected: %v", "(", strings.Join(binaryOperators, ", "))},
@ -262,30 +275,18 @@ func TestRequirementConstructor(t *testing.T) {
Key string Key string
Op selection.Operator Op selection.Operator
Vals sets.String Vals sets.String
WantErr field.ErrorList WantErr string
}{ }{
{ {
Key: "x1", Key: "x1",
Op: selection.In, Op: selection.In,
WantErr: field.ErrorList{ WantErr: "values: Invalid value: []string{}: for 'in', 'notin' operators, values set can't be empty",
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values",
BadValue: []string{},
},
},
}, },
{ {
Key: "x2", Key: "x2",
Op: selection.NotIn, Op: selection.NotIn,
Vals: sets.NewString(), Vals: sets.NewString(),
WantErr: field.ErrorList{ WantErr: "values: Invalid value: []string{}: for 'in', 'notin' operators, values set can't be empty",
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values",
BadValue: []string{},
},
},
}, },
{ {
Key: "x3", Key: "x3",
@ -298,16 +299,10 @@ func TestRequirementConstructor(t *testing.T) {
Vals: sets.NewString("foo"), Vals: sets.NewString("foo"),
}, },
{ {
Key: "x5", Key: "x5",
Op: selection.Equals, Op: selection.Equals,
Vals: sets.NewString("foo", "bar"), Vals: sets.NewString("foo", "bar"),
WantErr: field.ErrorList{ WantErr: "values: Invalid value: []string{\"bar\", \"foo\"}: exact-match compatibility requires one single value",
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values",
BadValue: []string{"bar", "foo"},
},
},
}, },
{ {
Key: "x6", Key: "x6",
@ -318,16 +313,10 @@ func TestRequirementConstructor(t *testing.T) {
Op: selection.DoesNotExist, Op: selection.DoesNotExist,
}, },
{ {
Key: "x8", Key: "x8",
Op: selection.Exists, Op: selection.Exists,
Vals: sets.NewString("foo"), Vals: sets.NewString("foo"),
WantErr: field.ErrorList{ WantErr: `values: Invalid value: []string{"foo"}: values set must be empty for exists and does not exist`,
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values",
BadValue: []string{"foo"},
},
},
}, },
{ {
Key: "x9", Key: "x9",
@ -350,39 +339,21 @@ func TestRequirementConstructor(t *testing.T) {
Vals: sets.NewString("6"), Vals: sets.NewString("6"),
}, },
{ {
Key: "x13", Key: "x13",
Op: selection.GreaterThan, Op: selection.GreaterThan,
WantErr: field.ErrorList{ WantErr: "values: Invalid value: []string{}: for 'Gt', 'Lt' operators, exactly one value is required",
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values",
BadValue: []string{},
},
},
}, },
{ {
Key: "x14", Key: "x14",
Op: selection.GreaterThan, Op: selection.GreaterThan,
Vals: sets.NewString("bar"), Vals: sets.NewString("bar"),
WantErr: field.ErrorList{ WantErr: `values[0]: Invalid value: "bar": for 'Gt', 'Lt' operators, the value must be an integer`,
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values[0]",
BadValue: "bar",
},
},
}, },
{ {
Key: "x15", Key: "x15",
Op: selection.LessThan, Op: selection.LessThan,
Vals: sets.NewString("bar"), Vals: sets.NewString("bar"),
WantErr: field.ErrorList{ WantErr: `values[0]: Invalid value: "bar": for 'Gt', 'Lt' operators, the value must be an integer`,
&field.Error{
Type: field.ErrorTypeInvalid,
Field: "values[0]",
BadValue: "bar",
},
},
}, },
{ {
Key: strings.Repeat("a", 254), //breaks DNS rule that len(key) <= 253 Key: strings.Repeat("a", 254), //breaks DNS rule that len(key) <= 253
@ -399,21 +370,29 @@ func TestRequirementConstructor(t *testing.T) {
Vals: sets.NewString("a b"), Vals: sets.NewString("a b"),
}, },
{ {
Key: "x18", Key: "x18",
Op: "unsupportedOp", Op: "unsupportedOp",
WantErr: field.ErrorList{ WantErr: `operator: Unsupported value: "unsupportedOp": supported values: "in", "notin", "=", "==", "!=", "~", "!~", "gt", "lt", "=>", "exists", "!"`,
&field.Error{
Type: field.ErrorTypeNotSupported,
Field: "operator",
BadValue: selection.Operator("unsupportedOp"),
},
},
}, },
} }
for _, rc := range requirementConstructorTests { for _, rc := range requirementConstructorTests {
_, err := NewRequirement(rc.Key, rc.Op, rc.Vals.List()) _, err := NewRequirement(rc.Key, rc.Op, rc.Vals.List())
if diff := cmp.Diff(rc.WantErr.ToAggregate(), err, ignoreDetail); diff != "" { if rc.WantErr != "" {
t.Errorf("NewRequirement test %v returned unexpected error (-want,+got):\n%s", rc.Key, diff) assert.NotNil(t, err)
if err != nil {
assert.Equal(t, rc.WantErr, err.Error())
}
} else {
assert.Nil(t, err)
}
_, err = NewIndirectRequirement(rc.Key, []string{"herb", "job", "nice", "reading"}, &rc.Op, rc.Vals.List())
if rc.WantErr != "" {
assert.NotNil(t, err)
if err != nil {
assert.Equal(t, rc.WantErr, err.Error())
}
} else {
assert.Nil(t, err)
} }
} }
} }

View File

@ -38,4 +38,5 @@ const (
Exists Operator = "exists" Exists Operator = "exists"
GreaterThan Operator = "gt" GreaterThan Operator = "gt"
LessThan Operator = "lt" LessThan Operator = "lt"
IndirectSelector Operator = "=>"
) )

View File

@ -14,9 +14,9 @@ import (
reflect "reflect" reflect "reflect"
types "github.com/rancher/apiserver/pkg/types" types "github.com/rancher/apiserver/pkg/types"
informer "github.com/rancher/steve/pkg/sqlcache/informer"
factory "github.com/rancher/steve/pkg/sqlcache/informer/factory" factory "github.com/rancher/steve/pkg/sqlcache/informer/factory"
partition "github.com/rancher/steve/pkg/sqlcache/partition" partition "github.com/rancher/steve/pkg/sqlcache/partition"
sqltypes "github.com/rancher/steve/pkg/sqlcache/sqltypes"
summary "github.com/rancher/wrangler/v3/pkg/summary" summary "github.com/rancher/wrangler/v3/pkg/summary"
gomock "go.uber.org/mock/gomock" gomock "go.uber.org/mock/gomock"
unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
@ -51,7 +51,7 @@ func (m *MockCache) EXPECT() *MockCacheMockRecorder {
} }
// ListByOptions mocks base method. // ListByOptions mocks base method.
func (m *MockCache) ListByOptions(arg0 context.Context, arg1 informer.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) { func (m *MockCache) ListByOptions(arg0 context.Context, arg1 *sqltypes.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
m.ctrl.T.Helper() m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3) ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(*unstructured.UnstructuredList) ret0, _ := ret[0].(*unstructured.UnstructuredList)

View File

@ -36,6 +36,7 @@ import (
"github.com/rancher/steve/pkg/sqlcache/informer" "github.com/rancher/steve/pkg/sqlcache/informer"
"github.com/rancher/steve/pkg/sqlcache/informer/factory" "github.com/rancher/steve/pkg/sqlcache/informer/factory"
"github.com/rancher/steve/pkg/sqlcache/partition" "github.com/rancher/steve/pkg/sqlcache/partition"
"github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/rancher/wrangler/v3/pkg/data" "github.com/rancher/wrangler/v3/pkg/data"
"github.com/rancher/wrangler/v3/pkg/schemas" "github.com/rancher/wrangler/v3/pkg/schemas"
"github.com/rancher/wrangler/v3/pkg/schemas/validation" "github.com/rancher/wrangler/v3/pkg/schemas/validation"
@ -216,7 +217,7 @@ type Cache interface {
// - the total number of resources (returned list might be a subset depending on pagination options in lo) // - the total number of resources (returned list might be a subset depending on pagination options in lo)
// - a continue token, if there are more pages after the returned one // - a continue token, if there are more pages after the returned one
// - an error instead of all of the above if anything went wrong // - an error instead of all of the above if anything went wrong
ListByOptions(ctx context.Context, lo informer.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error) ListByOptions(ctx context.Context, lo *sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
} }
// WarningBuffer holds warnings that may be returned from the kubernetes api // WarningBuffer holds warnings that may be returned from the kubernetes api
@ -783,7 +784,7 @@ func (s *Store) ListByPartitions(apiOp *types.APIRequest, schema *types.APISchem
return nil, 0, "", err return nil, 0, "", err
} }
list, total, continueToken, err := inf.ListByOptions(apiOp.Context(), opts, partitions, apiOp.Namespace) list, total, continueToken, err := inf.ListByOptions(apiOp.Context(), &opts, partitions, apiOp.Namespace)
if err != nil { if err != nil {
if errors.Is(err, informer.ErrInvalidColumn) { if errors.Is(err, informer.ErrInvalidColumn) {
return nil, 0, "", apierror.NewAPIError(validation.InvalidBodyContent, err.Error()) return nil, 0, "", apierror.NewAPIError(validation.InvalidBodyContent, err.Error())
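To make the caller-side change easier to follow outside the diff context, here is a hedged, self-contained sketch of the pattern in ListByPartitions: list options are now handed to the cache by pointer, and informer.ErrInvalidColumn is translated into a client-facing error. The byOptionsLister interface mirrors the Cache signature above; listResources and the plain fmt.Errorf wrapping are illustrative stand-ins (the real code returns an apierror with validation.InvalidBodyContent).

```go
package example

import (
	"context"
	"errors"
	"fmt"

	"github.com/rancher/steve/pkg/sqlcache/informer"
	"github.com/rancher/steve/pkg/sqlcache/partition"
	"github.com/rancher/steve/pkg/sqlcache/sqltypes"
	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)

// byOptionsLister mirrors the shape of the Cache interface in the hunk above.
type byOptionsLister interface {
	ListByOptions(ctx context.Context, lo *sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
}

// listResources is a hypothetical wrapper showing the pointer hand-off and
// the ErrInvalidColumn translation seen in ListByPartitions.
func listResources(ctx context.Context, inf byOptionsLister, opts sqltypes.ListOptions, parts []partition.Partition, ns string) (*unstructured.UnstructuredList, int, string, error) {
	list, total, continueToken, err := inf.ListByOptions(ctx, &opts, parts, ns)
	if err != nil {
		if errors.Is(err, informer.ErrInvalidColumn) {
			// an invalid filter or sort column is a bad request from the client
			return nil, 0, "", fmt.Errorf("invalid list options: %w", err)
		}
		return nil, 0, "", err
	}
	return list, total, continueToken, nil
}
```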

View File

@ -244,7 +244,7 @@ func TestListByPartitions(t *testing.T) {
// This tests that fields are being extracted from schema columns and the type specific fields map // This tests that fields are being extracted from schema columns and the type specific fields map
cf.EXPECT().CacheFor(context.Background(), [][]string{{"some", "field"}, {`id`}, {`metadata`, `state`, `name`}, {"gvk", "specific", "fields"}}, gomock.Any(), &tablelistconvert.Client{ResourceInterface: ri}, attributes.GVK(schema), attributes.Namespaced(schema), true).Return(c, nil) cf.EXPECT().CacheFor(context.Background(), [][]string{{"some", "field"}, {`id`}, {`metadata`, `state`, `name`}, {"gvk", "specific", "fields"}}, gomock.Any(), &tablelistconvert.Client{ResourceInterface: ri}, attributes.GVK(schema), attributes.Namespaced(schema), true).Return(c, nil)
tb.EXPECT().GetTransformFunc(attributes.GVK(schema)).Return(func(obj interface{}) (interface{}, error) { return obj, nil }) tb.EXPECT().GetTransformFunc(attributes.GVK(schema)).Return(func(obj interface{}) (interface{}, error) { return obj, nil })
bloi.EXPECT().ListByOptions(req.Context(), opts, partitions, req.Namespace).Return(listToReturn, len(listToReturn.Items), "", nil) bloi.EXPECT().ListByOptions(req.Context(), &opts, partitions, req.Namespace).Return(listToReturn, len(listToReturn.Items), "", nil)
list, total, contToken, err := s.ListByPartitions(req, schema, partitions) list, total, contToken, err := s.ListByPartitions(req, schema, partitions)
assert.Nil(t, err) assert.Nil(t, err)
assert.Equal(t, expectedItems, list) assert.Equal(t, expectedItems, list)
@ -461,7 +461,7 @@ func TestListByPartitions(t *testing.T) {
cf.EXPECT().CacheFor(context.Background(), [][]string{{"some", "field"}, {`id`}, {`metadata`, `state`, `name`}, {"gvk", "specific", "fields"}}, gomock.Any(), &tablelistconvert.Client{ResourceInterface: ri}, attributes.GVK(schema), attributes.Namespaced(schema), false).Return(c, nil) cf.EXPECT().CacheFor(context.Background(), [][]string{{"some", "field"}, {`id`}, {`metadata`, `state`, `name`}, {"gvk", "specific", "fields"}}, gomock.Any(), &tablelistconvert.Client{ResourceInterface: ri}, attributes.GVK(schema), attributes.Namespaced(schema), false).Return(c, nil)
tb.EXPECT().GetTransformFunc(attributes.GVK(schema)).Return(func(obj interface{}) (interface{}, error) { return obj, nil }) tb.EXPECT().GetTransformFunc(attributes.GVK(schema)).Return(func(obj interface{}) (interface{}, error) { return obj, nil })
bloi.EXPECT().ListByOptions(req.Context(), opts, partitions, req.Namespace).Return(listToReturn, len(listToReturn.Items), "", nil) bloi.EXPECT().ListByOptions(req.Context(), &opts, partitions, req.Namespace).Return(listToReturn, len(listToReturn.Items), "", nil)
list, total, contToken, err := s.ListByPartitions(req, schema, partitions) list, total, contToken, err := s.ListByPartitions(req, schema, partitions)
assert.Nil(t, err) assert.Nil(t, err)
assert.Equal(t, expectedItems, list) assert.Equal(t, expectedItems, list)
@ -610,7 +610,7 @@ func TestListByPartitions(t *testing.T) {
cg.EXPECT().TableAdminClient(req, schema, "", &WarningBuffer{}).Return(ri, nil) cg.EXPECT().TableAdminClient(req, schema, "", &WarningBuffer{}).Return(ri, nil)
// This tests that fields are being extracted from schema columns and the type specific fields map // This tests that fields are being extracted from schema columns and the type specific fields map
cf.EXPECT().CacheFor(context.Background(), [][]string{{"some", "field"}, {`id`}, {`metadata`, `state`, `name`}, {"gvk", "specific", "fields"}}, gomock.Any(), &tablelistconvert.Client{ResourceInterface: ri}, attributes.GVK(schema), attributes.Namespaced(schema), true).Return(c, nil) cf.EXPECT().CacheFor(context.Background(), [][]string{{"some", "field"}, {`id`}, {`metadata`, `state`, `name`}, {"gvk", "specific", "fields"}}, gomock.Any(), &tablelistconvert.Client{ResourceInterface: ri}, attributes.GVK(schema), attributes.Namespaced(schema), true).Return(c, nil)
bloi.EXPECT().ListByOptions(req.Context(), opts, partitions, req.Namespace).Return(nil, 0, "", fmt.Errorf("error")) bloi.EXPECT().ListByOptions(req.Context(), &opts, partitions, req.Namespace).Return(nil, 0, "", fmt.Errorf("error"))
tb.EXPECT().GetTransformFunc(attributes.GVK(schema)).Return(func(obj interface{}) (interface{}, error) { return obj, nil }) tb.EXPECT().GetTransformFunc(attributes.GVK(schema)).Return(func(obj interface{}) (interface{}, error) { return obj, nil })
_, _, _, err = s.ListByPartitions(req, schema, partitions) _, _, _, err = s.ListByPartitions(req, schema, partitions)
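One detail worth noting about the updated expectations: the test passes &opts while the store takes the address of its own opts value, so the expectation pointer and the call-site pointer are different. The expectation still matches because gomock wraps plain arguments in an equality matcher based on reflect.DeepEqual, which treats distinct pointers to deeply equal values as equal. A small standalone illustration with a stand-in struct (not the real sqltypes.ListOptions):

```go
package main

import (
	"fmt"
	"reflect"
)

// listOptions is a stand-in for sqltypes.ListOptions, used only to show the
// pointer comparison semantics the mock expectation relies on.
type listOptions struct {
	ChunkSize int
	Resume    string
}

func main() {
	expected := &listOptions{ChunkSize: 100}
	actual := &listOptions{ChunkSize: 100} // a different pointer to an equal value

	fmt.Println(expected == actual)                  // false: the pointers are distinct
	fmt.Println(reflect.DeepEqual(expected, actual)) // true: the pointed-to values are deeply equal
}
```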

View File

@ -13,8 +13,8 @@ import (
context "context" context "context"
reflect "reflect" reflect "reflect"
informer "github.com/rancher/steve/pkg/sqlcache/informer"
partition "github.com/rancher/steve/pkg/sqlcache/partition" partition "github.com/rancher/steve/pkg/sqlcache/partition"
sqltypes "github.com/rancher/steve/pkg/sqlcache/sqltypes"
gomock "go.uber.org/mock/gomock" gomock "go.uber.org/mock/gomock"
unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
) )
@ -43,7 +43,7 @@ func (m *MockByOptionsLister) EXPECT() *MockByOptionsListerMockRecorder {
} }
// ListByOptions mocks base method. // ListByOptions mocks base method.
func (m *MockByOptionsLister) ListByOptions(arg0 context.Context, arg1 informer.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) { func (m *MockByOptionsLister) ListByOptions(arg0 context.Context, arg1 *sqltypes.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
m.ctrl.T.Helper() m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3) ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(*unstructured.UnstructuredList) ret0, _ := ret[0].(*unstructured.UnstructuredList)