Move types related to list options and sql queries into their own package. (#610)
The problem with having these in the informer package is that code in other packages will eventually need to import `informer` just for its constants or types, while the informer package itself may already depend on those other packages. It is better to move the type definitions into their own, simpler package.
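To make the shape of the change concrete, below is a minimal sketch of what the extracted `sqltypes` package might look like. It is assembled only from the identifiers that appear in the diff that follows (ListOptions, OrFilter, Filter, Sort, Pagination, Op, SortOrder, the Eq/NotEq/In/NotIn/Exists/NotExists/Lt/Gt operators and the ASC/DESC orders); the underlying representations and constant values are assumptions, not taken from the actual package. Because this package has no dependencies of its own, both `informer` and the stores/sqlpartition code can import it without creating a cycle.

// Package sqltypes: a minimal sketch of the relocated types, inferred from
// the diff below. Concrete representations and constant values are assumed.
package sqltypes

// Op is a filter operator (the diff uses Eq, NotEq, In, NotIn, Exists,
// NotExists, Lt and Gt).
type Op string

const (
	Eq        Op = "="          // assumed underlying values
	NotEq     Op = "!="
	In        Op = "IN"
	NotIn     Op = "NOT IN"
	Exists    Op = "EXISTS"
	NotExists Op = "NOT EXISTS"
	Lt        Op = "<"
	Gt        Op = ">"
)

// SortOrder is the direction of a sort clause.
type SortOrder int

const (
	ASC SortOrder = iota // assumed iota-based enumeration
	DESC
)

// Filter matches a single field path against one or more values.
type Filter struct {
	Field   []string
	Matches []string
	Op      Op
	Partial bool
}

// OrFilter is a disjunction of Filters; a ListOptions ANDs its OrFilters.
type OrFilter struct {
	Filters []Filter
}

// Sort holds the requested sort field paths and their orders.
type Sort struct {
	Fields [][]string
	Orders []SortOrder
}

// Pagination holds page-based pagination parameters.
type Pagination struct {
	PageSize int
	Page     int
}

// ListOptions represents the query parameters that may be included in a list request.
type ListOptions struct {
	ChunkSize  int
	Resume     string
	Filters    []OrFilter
	Sort       Sort
	Pagination Pagination
}
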
@@ -10,8 +10,8 @@ import (
"github.com/rancher/apiserver/pkg/apierror"
"github.com/rancher/apiserver/pkg/types"
- "github.com/rancher/steve/pkg/sqlcache/informer"
"github.com/rancher/steve/pkg/sqlcache/partition"
+ "github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/rancher/steve/pkg/stores/queryhelper"
"github.com/rancher/steve/pkg/stores/sqlpartition/queryparser"
"github.com/rancher/steve/pkg/stores/sqlpartition/selection"
@@ -36,27 +36,27 @@ const (
)
var endsWithBracket = regexp.MustCompile(`^(.+)\[(.+)]$`)
- var mapK8sOpToRancherOp = map[selection.Operator]informer.Op{
- selection.Equals: informer.Eq,
- selection.DoubleEquals: informer.Eq,
- selection.PartialEquals: informer.Eq,
- selection.NotEquals: informer.NotEq,
- selection.NotPartialEquals: informer.NotEq,
- selection.In: informer.In,
- selection.NotIn: informer.NotIn,
- selection.Exists: informer.Exists,
- selection.DoesNotExist: informer.NotExists,
- selection.LessThan: informer.Lt,
- selection.GreaterThan: informer.Gt,
+ var mapK8sOpToRancherOp = map[selection.Operator]sqltypes.Op{
+ selection.Equals: sqltypes.Eq,
+ selection.DoubleEquals: sqltypes.Eq,
+ selection.PartialEquals: sqltypes.Eq,
+ selection.NotEquals: sqltypes.NotEq,
+ selection.NotPartialEquals: sqltypes.NotEq,
+ selection.In: sqltypes.In,
+ selection.NotIn: sqltypes.NotIn,
+ selection.Exists: sqltypes.Exists,
+ selection.DoesNotExist: sqltypes.NotExists,
+ selection.LessThan: sqltypes.Lt,
+ selection.GreaterThan: sqltypes.Gt,
}
// ListOptions represents the query parameters that may be included in a list request.
type ListOptions struct {
ChunkSize int
Resume string
- Filters []informer.OrFilter
- Sort informer.Sort
- Pagination informer.Pagination
+ Filters []sqltypes.OrFilter
+ Sort sqltypes.Sort
+ Pagination sqltypes.Pagination
}
type Cache interface {
@@ -66,10 +66,10 @@ type Cache interface {
// - the total number of resources (returned list might be a subset depending on pagination options in lo)
// - a continue token, if there are more pages after the returned one
// - an error instead of all of the above if anything went wrong
- ListByOptions(ctx context.Context, lo informer.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
+ ListByOptions(ctx context.Context, lo sqltypes.ListOptions, partitions []partition.Partition, namespace string) (*unstructured.UnstructuredList, int, string, error)
}
- func k8sOpToRancherOp(k8sOp selection.Operator) (informer.Op, bool, error) {
+ func k8sOpToRancherOp(k8sOp selection.Operator) (sqltypes.Op, bool, error) {
v, ok := mapK8sOpToRancherOp[k8sOp]
if ok {
return v, k8sOp == selection.PartialEquals || k8sOp == selection.NotPartialEquals, nil
@@ -77,11 +77,11 @@ func k8sOpToRancherOp(k8sOp selection.Operator) (informer.Op, bool, error) {
return "", false, fmt.Errorf("unknown k8sOp: %s", k8sOp)
}
- func k8sRequirementToOrFilter(requirement queryparser.Requirement) (informer.Filter, error) {
+ func k8sRequirementToOrFilter(requirement queryparser.Requirement) (sqltypes.Filter, error) {
values := requirement.Values()
queryFields := splitQuery(requirement.Key())
op, usePartialMatch, err := k8sOpToRancherOp(requirement.Operator())
- return informer.Filter{
+ return sqltypes.Filter{
Field: queryFields,
Matches: values,
Op: op,
@@ -90,8 +90,8 @@ func k8sRequirementToOrFilter(requirement queryparser.Requirement) (informer.Fil
}
// ParseQuery parses the query params of a request and returns a ListOptions.
- func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOptions, error) {
- opts := informer.ListOptions{}
+ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (sqltypes.ListOptions, error) {
+ opts := sqltypes.ListOptions{}
opts.ChunkSize = getLimit(apiOp)
@@ -100,13 +100,13 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
opts.Resume = cont
filterParams := q[filterParam]
- filterOpts := []informer.OrFilter{}
+ filterOpts := []sqltypes.OrFilter{}
for _, filters := range filterParams {
requirements, err := queryparser.ParseToRequirements(filters)
if err != nil {
- return informer.ListOptions{}, err
+ return sqltypes.ListOptions{}, err
}
- orFilter := informer.OrFilter{}
+ orFilter := sqltypes.OrFilter{}
for _, requirement := range requirements {
filter, err := k8sRequirementToOrFilter(requirement)
if err != nil {
@@ -118,16 +118,16 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
}
opts.Filters = filterOpts
- sortOpts := informer.Sort{}
+ sortOpts := sqltypes.Sort{}
sortKeys := q.Get(sortParam)
if sortKeys != "" {
sortParts := strings.Split(sortKeys, ",")
for _, sortPart := range sortParts {
field := sortPart
if len(field) > 0 {
- sortOrder := informer.ASC
+ sortOrder := sqltypes.ASC
if field[0] == '-' {
- sortOrder = informer.DESC
+ sortOrder = sqltypes.DESC
field = field[1:]
}
if len(field) > 0 {
@@ -140,7 +140,7 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
opts.Sort = sortOpts
var err error
- pagination := informer.Pagination{}
+ pagination := sqltypes.Pagination{}
pagination.PageSize, err = strconv.Atoi(q.Get(pageSizeParam))
if err != nil {
pagination.PageSize = 0
@@ -151,12 +151,12 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
}
opts.Pagination = pagination
- op := informer.Eq
+ op := sqltypes.Eq
projectsOrNamespaces := q.Get(projectsOrNamespacesVar)
if projectsOrNamespaces == "" {
projectsOrNamespaces = q.Get(projectsOrNamespacesVar + notOp)
if projectsOrNamespaces != "" {
- op = informer.NotEq
+ op = sqltypes.NotEq
}
}
if projectsOrNamespaces != "" {
@@ -167,12 +167,12 @@ func ParseQuery(apiOp *types.APIRequest, namespaceCache Cache) (informer.ListOpt
if projOrNSFilters == nil {
return opts, apierror.NewAPIError(validation.NotFound, fmt.Sprintf("could not find any namespaces named [%s] or namespaces belonging to project named [%s]", projectsOrNamespaces, projectsOrNamespaces))
}
- if op == informer.NotEq {
+ if op == sqltypes.NotEq {
for _, filter := range projOrNSFilters {
- opts.Filters = append(opts.Filters, informer.OrFilter{Filters: []informer.Filter{filter}})
+ opts.Filters = append(opts.Filters, sqltypes.OrFilter{Filters: []sqltypes.Filter{filter}})
}
} else {
- opts.Filters = append(opts.Filters, informer.OrFilter{Filters: projOrNSFilters})
+ opts.Filters = append(opts.Filters, sqltypes.OrFilter{Filters: projOrNSFilters})
}
}
@@ -205,22 +205,22 @@ func splitQuery(query string) []string {
return strings.Split(query, ".")
}
- func parseNamespaceOrProjectFilters(ctx context.Context, projOrNS string, op informer.Op, namespaceInformer Cache) ([]informer.Filter, error) {
- var filters []informer.Filter
+ func parseNamespaceOrProjectFilters(ctx context.Context, projOrNS string, op sqltypes.Op, namespaceInformer Cache) ([]sqltypes.Filter, error) {
+ var filters []sqltypes.Filter
for _, pn := range strings.Split(projOrNS, ",") {
- uList, _, _, err := namespaceInformer.ListByOptions(ctx, informer.ListOptions{
- Filters: []informer.OrFilter{
+ uList, _, _, err := namespaceInformer.ListByOptions(ctx, sqltypes.ListOptions{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "name"},
Matches: []string{pn},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
{
Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{pn},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
},
},
@@ -230,7 +230,7 @@ func parseNamespaceOrProjectFilters(ctx context.Context, projOrNS string, op inf
return filters, err
}
for _, item := range uList.Items {
- filters = append(filters, informer.Filter{
+ filters = append(filters, sqltypes.Filter{
Field: []string{"metadata", "namespace"},
Matches: []string{item.GetName()},
Op: op,

@@ -8,8 +8,8 @@ import (
"testing"
"github.com/rancher/apiserver/pkg/types"
- "github.com/rancher/steve/pkg/sqlcache/informer"
"github.com/rancher/steve/pkg/sqlcache/partition"
+ "github.com/rancher/steve/pkg/sqlcache/sqltypes"
"github.com/stretchr/testify/assert"
"go.uber.org/mock/gomock"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
@@ -23,7 +23,7 @@ func TestParseQuery(t *testing.T) {
setupNSCache func() Cache
nsc Cache
req *types.APIRequest
- expectedLO informer.ListOptions
+ expectedLO sqltypes.ListOptions
errExpected bool
errorText string
}
@@ -35,10 +35,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: ""},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -51,21 +51,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "projectsornamespaces=somethin"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "namespace"},
Matches: []string{"ns1"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -82,19 +82,19 @@ func TestParseQuery(t *testing.T) {
},
}
nsc := NewMockCache(gomock.NewController(t))
- nsc.EXPECT().ListByOptions(context.Background(), informer.ListOptions{
- Filters: []informer.OrFilter{
+ nsc.EXPECT().ListByOptions(context.Background(), sqltypes.ListOptions{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "name"},
Matches: []string{"somethin"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
{
Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{"somethin"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
},
},
@@ -111,40 +111,40 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "projectsornamespaces=somethin"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "namespace"},
Matches: []string{"ns1"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
errExpected: true,
setupNSCache: func() Cache {
nsi := NewMockCache(gomock.NewController(t))
- nsi.EXPECT().ListByOptions(context.Background(), informer.ListOptions{
- Filters: []informer.OrFilter{
+ nsi.EXPECT().ListByOptions(context.Background(), sqltypes.ListOptions{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "name"},
Matches: []string{"somethin"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
{
Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{"somethin"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
},
},
@@ -161,21 +161,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "projectsornamespaces=somethin"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "namespace"},
Matches: []string{"ns1"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -185,19 +185,19 @@ func TestParseQuery(t *testing.T) {
Items: []unstructured.Unstructured{},
}
nsi := NewMockCache(gomock.NewController(t))
- nsi.EXPECT().ListByOptions(context.Background(), informer.ListOptions{
- Filters: []informer.OrFilter{
+ nsi.EXPECT().ListByOptions(context.Background(), sqltypes.ListOptions{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "name"},
Matches: []string{"somethin"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
{
Field: []string{"metadata", "labels", "field.cattle.io/projectId"},
Matches: []string{"somethin"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
},
},
},
@@ -213,21 +213,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a~c"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a"},
Matches: []string{"c"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: true,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -239,21 +239,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a=c"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a"},
Matches: []string{"c"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -274,21 +274,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]~heads"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "labels", "grover.example.com/fish"},
Matches: []string{"heads"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: true,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -300,21 +300,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.annotations[chumley.example.com/fish]=seals"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "annotations", "chumley.example.com/fish"},
Matches: []string{"seals"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -326,20 +326,20 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.fields[3]<5"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "fields", "3"},
Matches: []string{"5"},
- Op: informer.Lt,
+ Op: sqltypes.Lt,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -351,21 +351,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.labels[grover.example.com/fish]~heads"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "labels", "grover.example.com/fish"},
Matches: []string{"heads"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: true,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -377,31 +377,31 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a=c&filter=b=d"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a"},
Matches: []string{"c"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"b"},
Matches: []string{"d"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -413,31 +413,31 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a=c&filter=b=d"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a"},
Matches: []string{"c"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"b"},
Matches: []string{"d"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -449,27 +449,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=beer=pabst,metadata.labels[beer2.io/ale] ~schlitz"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"beer"},
Matches: []string{"pabst"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
{
Field: []string{"metadata", "labels", "beer2.io/ale"},
Matches: []string{"schlitz"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: true,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -481,27 +481,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=beer=natty-bo,metadata.labels.beer3~rainier"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"beer"},
Matches: []string{"natty-bo"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: false,
},
{
Field: []string{"metadata", "labels", "beer3"},
Matches: []string{"rainier"},
- Op: informer.Eq,
+ Op: sqltypes.Eq,
Partial: true,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -513,27 +513,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a1In in (x1),a2In IN (x2)"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a1In"},
Matches: []string{"x1"},
- Op: informer.In,
+ Op: sqltypes.In,
Partial: false,
},
{
Field: []string{"a2In"},
Matches: []string{"x2"},
- Op: informer.In,
+ Op: sqltypes.In,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -545,21 +545,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a2In in (x2a, x2b)"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a2In"},
Matches: []string{"x2a", "x2b"},
- Op: informer.In,
+ Op: sqltypes.In,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -571,27 +571,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a1NotIn notin (x1),a2NotIn NOTIN (x2)"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a1NotIn"},
Matches: []string{"x1"},
- Op: informer.NotIn,
+ Op: sqltypes.NotIn,
Partial: false,
},
{
Field: []string{"a2NotIn"},
Matches: []string{"x2"},
- Op: informer.NotIn,
+ Op: sqltypes.NotIn,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -603,21 +603,21 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a3NotIn in (x3a, x3b)"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a3NotIn"},
Matches: []string{"x3a", "x3b"},
- Op: informer.In,
+ Op: sqltypes.In,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -629,27 +629,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a4In iN (x4a),a4NotIn nOtIn (x4b)"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a4In"},
Matches: []string{"x4a"},
- Op: informer.In,
+ Op: sqltypes.In,
Partial: false,
},
{
Field: []string{"a4NotIn"},
Matches: []string{"x4b"},
- Op: informer.NotIn,
+ Op: sqltypes.NotIn,
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -671,33 +671,33 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=metadata.labels.a5In1,!metadata.labels.a5In2, ! metadata.labels.a5In3"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"metadata", "labels", "a5In1"},
- Op: informer.Exists,
+ Op: sqltypes.Exists,
Matches: []string{},
Partial: false,
},
{
Field: []string{"metadata", "labels", "a5In2"},
- Op: informer.NotExists,
+ Op: sqltypes.NotExists,
Matches: []string{},
Partial: false,
},
{
Field: []string{"metadata", "labels", "a5In3"},
- Op: informer.NotExists,
+ Op: sqltypes.NotExists,
Matches: []string{},
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -709,27 +709,27 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "filter=a<1,b>2"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: []informer.OrFilter{
+ Filters: []sqltypes.OrFilter{
{
- Filters: []informer.Filter{
+ Filters: []sqltypes.Filter{
{
Field: []string{"a"},
- Op: informer.Lt,
+ Op: sqltypes.Lt,
Matches: []string{"1"},
Partial: false,
},
{
Field: []string{"b"},
- Op: informer.Gt,
+ Op: sqltypes.Gt,
Matches: []string{"2"},
Partial: false,
},
},
},
},
- Pagination: informer.Pagination{
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -742,15 +742,15 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=metadata.name"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Sort: informer.Sort{
+ Sort: sqltypes.Sort{
Fields: [][]string{
{"metadata", "name"}},
- Orders: []informer.SortOrder{informer.ASC},
+ Orders: []sqltypes.SortOrder{sqltypes.ASC},
},
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -763,14 +763,14 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=-metadata.name"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Sort: informer.Sort{
+ Sort: sqltypes.Sort{
Fields: [][]string{{"metadata", "name"}},
- Orders: []informer.SortOrder{informer.DESC},
+ Orders: []sqltypes.SortOrder{sqltypes.DESC},
},
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -783,20 +783,20 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=-metadata.name,spec.something"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Sort: informer.Sort{
+ Sort: sqltypes.Sort{
Fields: [][]string{
{"metadata", "name"},
{"spec", "something"},
},
- Orders: []informer.SortOrder{
- informer.DESC,
- informer.ASC,
+ Orders: []sqltypes.SortOrder{
+ sqltypes.DESC,
+ sqltypes.ASC,
},
},
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -809,17 +809,17 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "sort=-metadata.labels[beef.cattle.io/snort],metadata.labels.steer,metadata.labels[bossie.cattle.io/moo],spec.something"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Sort: informer.Sort{
+ Sort: sqltypes.Sort{
Fields: [][]string{{"metadata", "labels", "beef.cattle.io/snort"},
{"metadata", "labels", "steer"},
{"metadata", "labels", "bossie.cattle.io/moo"},
{"spec", "something"}},
- Orders: []informer.SortOrder{informer.DESC, informer.ASC, informer.ASC, informer.ASC},
+ Orders: []sqltypes.SortOrder{sqltypes.DESC, sqltypes.ASC, sqltypes.ASC, sqltypes.ASC},
},
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -835,11 +835,11 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "continue=5"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
Resume: "5",
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -852,11 +852,11 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "continue=5"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
Resume: "5",
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -869,10 +869,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "limit=3"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: 3,
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 1,
},
},
@@ -885,10 +885,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "page=3"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
Page: 3,
},
},
@@ -901,10 +901,10 @@ func TestParseQuery(t *testing.T) {
URL: &url.URL{RawQuery: "pagesize=20"},
},
},
- expectedLO: informer.ListOptions{
+ expectedLO: sqltypes.ListOptions{
ChunkSize: defaultLimit,
- Filters: make([]informer.OrFilter, 0),
- Pagination: informer.Pagination{
+ Filters: make([]sqltypes.OrFilter, 0),
+ Pagination: sqltypes.Pagination{
PageSize: 20,
Page: 1,
},

@@ -13,8 +13,8 @@ import (
context "context"
reflect "reflect"
- informer "github.com/rancher/steve/pkg/sqlcache/informer"
partition "github.com/rancher/steve/pkg/sqlcache/partition"
+ sqltypes "github.com/rancher/steve/pkg/sqlcache/sqltypes"
gomock "go.uber.org/mock/gomock"
unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)
@@ -43,7 +43,7 @@ func (m *MockCache) EXPECT() *MockCacheMockRecorder {
}
// ListByOptions mocks base method.
- func (m *MockCache) ListByOptions(arg0 context.Context, arg1 informer.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
+ func (m *MockCache) ListByOptions(arg0 context.Context, arg1 sqltypes.ListOptions, arg2 []partition.Partition, arg3 string) (*unstructured.UnstructuredList, int, string, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListByOptions", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(*unstructured.UnstructuredList)