add explain tests for openapiv3

Alexander Zielenski 2023-02-02 09:14:48 -08:00
parent 8249a827bd
commit 9597abd089
4 changed files with 28950 additions and 50 deletions


@@ -14,24 +14,31 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
package explain
package explain_test
import (
"errors"
"path/filepath"
"strings"
"regexp"
"testing"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/api/meta"
sptest "k8s.io/apimachinery/pkg/util/strategicpatch/testing"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/client-go/discovery"
openapiclient "k8s.io/client-go/openapi"
"k8s.io/client-go/rest"
clienttestutil "k8s.io/client-go/util/testing"
"k8s.io/kubectl/pkg/cmd/explain"
cmdtesting "k8s.io/kubectl/pkg/cmd/testing"
cmdutil "k8s.io/kubectl/pkg/cmd/util"
"k8s.io/kubectl/pkg/util/openapi"
)
var (
fakeSchema = sptest.Fake{Path: filepath.Join("..", "..", "..", "testdata", "openapi", "swagger.json")}
testDataPath = filepath.Join("..", "..", "..", "testdata")
fakeSchema = sptest.Fake{Path: filepath.Join(testDataPath, "openapi", "swagger.json")}
FakeOpenAPISchema = testOpenAPISchema{
OpenAPISchemaFn: func() (openapi.Resources, error) {
s, err := fakeSchema.OpenAPISchema()
@@ -51,8 +58,8 @@ func TestExplainInvalidArgs(t *testing.T) {
tf := cmdtesting.NewTestFactory()
defer tf.Cleanup()
opts := NewExplainOptions("kubectl", genericclioptions.NewTestIOStreamsDiscard())
cmd := NewCmdExplain("kubectl", tf, genericclioptions.NewTestIOStreamsDiscard())
opts := explain.NewExplainOptions("kubectl", genericclioptions.NewTestIOStreamsDiscard())
cmd := explain.NewCmdExplain("kubectl", tf, genericclioptions.NewTestIOStreamsDiscard())
err := opts.Complete(tf, cmd, []string{})
if err != nil {
t.Fatalf("unexpected error %v", err)
@@ -78,8 +85,8 @@ func TestExplainNotExistResource(t *testing.T) {
tf := cmdtesting.NewTestFactory()
defer tf.Cleanup()
opts := NewExplainOptions("kubectl", genericclioptions.NewTestIOStreamsDiscard())
cmd := NewCmdExplain("kubectl", tf, genericclioptions.NewTestIOStreamsDiscard())
opts := explain.NewExplainOptions("kubectl", genericclioptions.NewTestIOStreamsDiscard())
cmd := explain.NewCmdExplain("kubectl", tf, genericclioptions.NewTestIOStreamsDiscard())
err := opts.Complete(tf, cmd, []string{"foo"})
if err != nil {
t.Fatalf("unexpected error %v", err)
@@ -96,30 +103,106 @@ func TestExplainNotExistResource(t *testing.T) {
}
}
func TestExplainNotExistVersion(t *testing.T) {
tf := cmdtesting.NewTestFactory()
defer tf.Cleanup()
type explainTestCase struct {
Name string
Args []string
Flags map[string]string
ExpectPattern []string
ExpectErrorPattern string
opts := NewExplainOptions("kubectl", genericclioptions.NewTestIOStreamsDiscard())
cmd := NewCmdExplain("kubectl", tf, genericclioptions.NewTestIOStreamsDiscard())
err := opts.Complete(tf, cmd, []string{"pods"})
// Custom OpenAPI V3 client to use for the test. If nil, a default one will
// be provided
OpenAPIV3SchemaFn func() (openapiclient.Client, error)
}
var explainV2Cases = []explainTestCase{
{
Name: "Basic",
Args: []string{"pods"},
ExpectPattern: []string{`\s*KIND:[\t ]*Pod\s*`},
},
{
Name: "Recursive",
Args: []string{"pods"},
Flags: map[string]string{"recursive": "true"},
ExpectPattern: []string{`\s*KIND:[\t ]*Pod\s*`},
},
{
Name: "DefaultAPIVersion",
Args: []string{"horizontalpodautoscalers"},
Flags: map[string]string{"api-version": "autoscaling/v1"},
ExpectPattern: []string{`\s*VERSION:[\t ]*(v1|autoscaling/v1)\s*`},
},
{
Name: "NonExistingAPIVersion",
Args: []string{"pods"},
Flags: map[string]string{"api-version": "v99"},
ExpectErrorPattern: `couldn't find resource for \"/v99, (Kind=Pod|Resource=pods)\"`,
},
{
Name: "NonExistingResource",
Args: []string{"foo"},
ExpectErrorPattern: `the server doesn't have a resource type "foo"`,
},
}
func TestExplainOpenAPIV2(t *testing.T) {
runExplainTestCases(t, explainV2Cases)
}
func TestExplainOpenAPIV3(t *testing.T) {
// Returns a client that causes fallback to v2 implementation
fallbackV3SchemaFn := func() (openapiclient.Client, error) {
fakeDiscoveryClient := discovery.NewDiscoveryClientForConfigOrDie(&rest.Config{Host: "https://not.a.real.site:65543/"})
return fakeDiscoveryClient.OpenAPIV3(), nil
}
cases := []explainTestCase{
{
// No --output, but OpenAPIV3 enabled should fall back to v2 if
// v2 is not available. Shows this by making openapiv3 client
// point to a bad URL. So the fact the proper data renders is
// indication v2 was used instead.
Name: "Fallback",
Args: []string{"pods"},
ExpectPattern: []string{`\s*KIND:[\t ]*Pod\s*`},
OpenAPIV3SchemaFn: fallbackV3SchemaFn,
},
{
Name: "NonDefaultAPIVersion",
Args: []string{"horizontalpodautoscalers"},
Flags: map[string]string{"api-version": "autoscaling/v2"},
ExpectPattern: []string{`\s*VERSION:[\t ]*(v2|autoscaling/v2)\s*`},
},
{
// Show that explicitly specifying --output plaintext-openapiv2 causes
// old implementation to be used even though OpenAPIV3 is enabled
Name: "OutputPlaintextV2",
Args: []string{"pods"},
Flags: map[string]string{"output": "plaintext-openapiv2"},
ExpectPattern: []string{`\s*KIND:[\t ]*Pod\s*`},
OpenAPIV3SchemaFn: fallbackV3SchemaFn,
},
}
cases = append(cases, explainV2Cases...)
cmdtesting.WithAlphaEnvs([]cmdutil.FeatureGate{cmdutil.ExplainOpenapiV3}, t, func(t *testing.T) {
runExplainTestCases(t, cases)
})
}
func runExplainTestCases(t *testing.T, cases []explainTestCase) {
fakeServer, err := clienttestutil.NewFakeOpenAPIV3Server(filepath.Join(testDataPath, "openapi", "v3"))
if err != nil {
t.Fatalf("unexpected error %v", err)
t.Fatalf("error starting fake openapi server: %v", err.Error())
}
opts.APIVersion = "v99"
defer fakeServer.HttpServer.Close()
err = opts.Validate()
if err != nil {
t.Fatalf("unexpected error %v", err)
openapiV3SchemaFn := func() (openapiclient.Client, error) {
fakeDiscoveryClient := discovery.NewDiscoveryClientForConfigOrDie(&rest.Config{Host: fakeServer.HttpServer.URL})
return fakeDiscoveryClient.OpenAPIV3(), nil
}
err = opts.Run()
if err.Error() != "couldn't find resource for \"/v99, Kind=Pod\"" {
t.Errorf("unexpected non-error")
}
}
func TestExplain(t *testing.T) {
tf := cmdtesting.NewTestFactory()
defer tf.Cleanup()
@@ -127,29 +210,72 @@ func TestExplain(t *testing.T) {
tf.ClientConfigVal = cmdtesting.DefaultClientConfig()
ioStreams, _, buf, _ := genericclioptions.NewTestIOStreams()
cmd := NewCmdExplain("kubectl", tf, ioStreams)
cmd.Run(cmd, []string{"pods"})
if !strings.Contains(buf.String(), "KIND: Pod") {
t.Fatalf("expected output should include pod kind")
type catchFatal error
for _, tcase := range cases {
t.Run(tcase.Name, func(t *testing.T) {
// Catch os.Exit calls for tests which expect them
// and replace them with panics that we catch in each test
// to check if it is expected.
cmdutil.BehaviorOnFatal(func(str string, code int) {
panic(catchFatal(errors.New(str)))
})
defer cmdutil.DefaultBehaviorOnFatal()
var err error
func() {
defer func() {
// Catch panic and check at end of test if it is
// expected.
if panicErr := recover(); panicErr != nil {
if e := panicErr.(catchFatal); e != nil {
err = e
} else {
panic(panicErr)
}
}
}()
if tcase.OpenAPIV3SchemaFn != nil {
tf.OpenAPIV3ClientFunc = tcase.OpenAPIV3SchemaFn
} else {
tf.OpenAPIV3ClientFunc = openapiV3SchemaFn
}
cmd.Flags().Set("recursive", "true")
cmd.Run(cmd, []string{"pods"})
if !strings.Contains(buf.String(), "KIND: Pod") ||
!strings.Contains(buf.String(), "annotations\t<map[string]string>") {
t.Fatalf("expected output should include pod kind")
cmd := explain.NewCmdExplain("kubectl", tf, ioStreams)
for k, v := range tcase.Flags {
if err := cmd.Flags().Set(k, v); err != nil {
t.Fatal(err)
}
}
cmd.Run(cmd, tcase.Args)
}()
for _, rexp := range tcase.ExpectPattern {
if matched, err := regexp.MatchString(rexp, buf.String()); err != nil || !matched {
if err != nil {
t.Error(err)
} else {
t.Errorf("expected output to match regex:\n\t%s\ninstead got:\n\t%s", rexp, buf.String())
}
}
}
cmd.Flags().Set("api-version", "batch/v1")
cmd.Run(cmd, []string{"cronjobs"})
if !strings.Contains(buf.String(), "VERSION: batch/v1") {
t.Fatalf("expected output should include pod batch/v1")
if err != nil {
if matched, regexErr := regexp.MatchString(tcase.ExpectErrorPattern, err.Error()); len(tcase.ExpectErrorPattern) == 0 || regexErr != nil || !matched {
t.Fatalf("unexpected error: %s did not match regex %s (%v)", err.Error(),
tcase.ExpectErrorPattern, regexErr)
}
} else if len(tcase.ExpectErrorPattern) > 0 {
t.Fatalf("did not trigger expected error: %s in output:\n%s", tcase.ExpectErrorPattern, buf.String())
}
})
cmd.Flags().Set("api-version", "batch/v1beta1")
cmd.Run(cmd, []string{"cronjobs"})
if !strings.Contains(buf.String(), "VERSION: batch/v1beta1") {
t.Fatalf("expected output should include pod batch/v1beta1")
buf.Reset()
}
}
@@ -161,11 +287,11 @@ func TestAlphaEnablement(t *testing.T) {
f := cmdtesting.NewTestFactory()
defer f.Cleanup()
cmd := NewCmdExplain("kubectl", f, genericclioptions.NewTestIOStreamsDiscard())
cmd := explain.NewCmdExplain("kubectl", f, genericclioptions.NewTestIOStreamsDiscard())
require.Nil(t, cmd.Flags().Lookup(flag), "flag %q should not be registered without the %q feature enabled", flag, feature)
cmdtesting.WithAlphaEnvs([]cmdutil.FeatureGate{feature}, t, func(t *testing.T) {
cmd := NewCmdExplain("kubectl", f, genericclioptions.NewTestIOStreamsDiscard())
cmd := explain.NewCmdExplain("kubectl", f, genericclioptions.NewTestIOStreamsDiscard())
require.NotNil(t, cmd.Flags().Lookup(flag), "flag %q should be registered with the %q feature enabled", flag, feature)
})
}
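
The core piece of the new harness is the fake OpenAPI V3 endpoint that runExplainTestCases stands up from the checked-in test data and hands to kubectl through the test factory's OpenAPIV3ClientFunc hook. Below is a minimal sketch of that wiring, pulled out of the test above for reference; the newFakeOpenAPIV3ClientFn helper name and the exact testdata path are illustrative assumptions, not part of the commit.

package explain_test

import (
	"path/filepath"
	"testing"

	"k8s.io/client-go/discovery"
	openapiclient "k8s.io/client-go/openapi"
	"k8s.io/client-go/rest"
	clienttestutil "k8s.io/client-go/util/testing"
)

// newFakeOpenAPIV3ClientFn starts a fake /openapi/v3 server backed by the
// testdata directory and returns a function suitable for assignment to
// tf.OpenAPIV3ClientFunc. (Illustrative helper; the commit does this inline
// in runExplainTestCases.)
func newFakeOpenAPIV3ClientFn(t *testing.T) func() (openapiclient.Client, error) {
	fakeServer, err := clienttestutil.NewFakeOpenAPIV3Server(filepath.Join("..", "..", "..", "testdata", "openapi", "v3"))
	if err != nil {
		t.Fatalf("error starting fake openapi server: %v", err)
	}
	t.Cleanup(fakeServer.HttpServer.Close)

	return func() (openapiclient.Client, error) {
		// The discovery client fetches group/version discovery and the
		// individual schemas from the fake server's URL.
		client := discovery.NewDiscoveryClientForConfigOrDie(&rest.Config{Host: fakeServer.HttpServer.URL})
		return client.OpenAPIV3(), nil
	}
}

Because the hook is just a func() (openapiclient.Client, error), the Fallback case can swap in a client pointed at an unreachable host and assert that explain still renders v2 output.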

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long
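
A second pattern in the rewritten test file is worth isolating: kubectl's fatal handler normally calls os.Exit, so the table runner replaces it with a panic of a dedicated error type and recovers that panic, turning fatal output into an error the test can match against ExpectErrorPattern. A condensed sketch of that mechanism follows; runWithFatalCapture is an illustrative name, and the comma-ok type assertion is a slight variation on the commit's direct assertion.

package explain_test

import (
	"errors"

	cmdutil "k8s.io/kubectl/pkg/cmd/util"
)

// catchFatal is the error type panicked by the replacement fatal handler so
// the deferred recover below can convert it back into a plain error.
type catchFatal error

// runWithFatalCapture runs fn with cmdutil's fatal behavior replaced by a
// panic and returns the captured fatal message, if any, as an error.
func runWithFatalCapture(fn func()) (err error) {
	cmdutil.BehaviorOnFatal(func(str string, code int) {
		panic(catchFatal(errors.New(str)))
	})
	defer cmdutil.DefaultBehaviorOnFatal()
	defer func() {
		if panicErr := recover(); panicErr != nil {
			if e, ok := panicErr.(catchFatal); ok {
				err = e // a fatal call was intercepted; report it as an error
			} else {
				panic(panicErr) // unrelated panic: re-raise
			}
		}
	}()
	fn()
	return nil
}

Called as err := runWithFatalCapture(func() { cmd.Run(cmd, tcase.Args) }), this mirrors the inline closure in runExplainTestCases above.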