Merge pull request #82333 from tlereste/fix_sort_by_cpu_memory

Fix kubectl top sort-by cpu and sort-by memory options
This commit is contained in:
Kubernetes Prow Robot 2020-01-15 04:45:32 -08:00 committed by GitHub
commit a3a9c78d01
4 changed files with 423 additions and 2 deletions
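For context on the diff below: before this change the node and pod sorters compared CPU usage with Quantity.Value(), which rounds fractional quantities up to whole units, so sub-core usages such as the millicore values in the test fixtures all compared equal and --sort-by=cpu could not order them. The commit switches the comparison to MilliValue() and also swaps the usage slices in lockstep with the metrics slices. A minimal standalone sketch of the Value/MilliValue difference (not code from the commit; it assumes only the k8s.io/apimachinery resource package):

```go
package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/api/resource"
)

func main() {
	low := resource.MustParse("250m")  // 0.25 cores
	high := resource.MustParse("750m") // 0.75 cores

	// Value() rounds fractions up to whole units, so both quantities
	// collapse to 1 and a ">" comparison cannot order them.
	fmt.Println(low.Value(), high.Value()) // 1 1

	// MilliValue() keeps millicore precision, which is what the fixed
	// sorters compare.
	fmt.Println(low.MilliValue(), high.MilliValue()) // 250 750
}
```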

View File

@@ -21,6 +21,7 @@ import (
"fmt"
"io/ioutil"
"net/http"
"reflect"
"strings"
"testing"
@@ -283,6 +284,147 @@ func TestTopNodeWithLabelSelectorMetrics(t *testing.T) {
}
}
func TestTopNodeWithSortByCpuMetrics(t *testing.T) {
cmdtesting.InitTestErrorHandler(t)
metrics, nodes := testNodeV1beta1MetricsData()
expectedMetrics := metricsv1beta1api.NodeMetricsList{
ListMeta: metrics.ListMeta,
Items: metrics.Items[:],
}
expectedMetricsPath := fmt.Sprintf("%s/%s/nodes", baseMetricsAddress, metricsAPIVersion)
expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)
expectedNodes := []string{"node2", "node3", "node1"}
tf := cmdtesting.NewTestFactory().WithNamespace("test")
defer tf.Cleanup()
codec := scheme.Codecs.LegacyCodec(scheme.Scheme.PrioritizedVersionsAllGroups()...)
ns := scheme.Codecs
tf.Client = &fake.RESTClient{
NegotiatedSerializer: ns,
Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
switch p, m := req.URL.Path, req.Method; {
case p == "/api":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
case p == "/apis":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbody)))}, nil
case p == expectedMetricsPath && m == "GET":
body, err := marshallBody(expectedMetrics)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: body}, nil
case p == expectedNodePath && m == "GET":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: cmdtesting.ObjBody(codec, nodes)}, nil
default:
t.Fatalf("unexpected request: %#v\nGot URL: %#v\nExpected path: %#v", req, req.URL, expectedMetricsPath)
return nil, nil
}
}),
}
tf.ClientConfigVal = cmdtesting.DefaultClientConfig()
streams, _, buf, _ := genericclioptions.NewTestIOStreams()
cmd := NewCmdTopNode(tf, nil, streams)
cmd.Flags().Set("sort-by", "cpu")
cmd.Run(cmd, []string{})
// Check the presence of node names in the output.
result := buf.String()
for _, m := range expectedMetrics.Items {
if !strings.Contains(result, m.Name) {
t.Errorf("missing metrics for %s: \n%s", m.Name, result)
}
}
resultLines := strings.Split(result, "\n")
resultNodes := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultNodes[i] = lineFirstColumn
}
if !reflect.DeepEqual(resultNodes, expectedNodes) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", expectedNodes, resultNodes)
}
}
func TestTopNodeWithSortByMemoryMetrics(t *testing.T) {
cmdtesting.InitTestErrorHandler(t)
metrics, nodes := testNodeV1beta1MetricsData()
expectedMetrics := metricsv1beta1api.NodeMetricsList{
ListMeta: metrics.ListMeta,
Items: metrics.Items[:],
}
expectedMetricsPath := fmt.Sprintf("%s/%s/nodes", baseMetricsAddress, metricsAPIVersion)
expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)
expectedNodes := []string{"node2", "node3", "node1"}
tf := cmdtesting.NewTestFactory().WithNamespace("test")
defer tf.Cleanup()
codec := scheme.Codecs.LegacyCodec(scheme.Scheme.PrioritizedVersionsAllGroups()...)
ns := scheme.Codecs
tf.Client = &fake.RESTClient{
NegotiatedSerializer: ns,
Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
switch p, m := req.URL.Path, req.Method; {
case p == "/api":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
case p == "/apis":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbody)))}, nil
case p == expectedMetricsPath && m == "GET":
body, err := marshallBody(expectedMetrics)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: body}, nil
case p == expectedNodePath && m == "GET":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: cmdtesting.ObjBody(codec, nodes)}, nil
default:
t.Fatalf("unexpected request: %#v\nGot URL: %#v\nExpected path: %#v", req, req.URL, expectedMetricsPath)
return nil, nil
}
}),
}
tf.ClientConfigVal = cmdtesting.DefaultClientConfig()
streams, _, buf, _ := genericclioptions.NewTestIOStreams()
cmd := NewCmdTopNode(tf, nil, streams)
cmd.Flags().Set("sort-by", "memory")
cmd.Run(cmd, []string{})
// Check the presence of node names in the output.
result := buf.String()
for _, m := range expectedMetrics.Items {
if !strings.Contains(result, m.Name) {
t.Errorf("missing metrics for %s: \n%s", m.Name, result)
}
}
resultLines := strings.Split(result, "\n")
resultNodes := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultNodes[i] = lineFirstColumn
}
if !reflect.DeepEqual(resultNodes, expectedNodes) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", expectedNodes, resultNodes)
}
}
func TestTopNodeAllMetricsFromMetricsServer(t *testing.T) {
cmdtesting.InitTestErrorHandler(t)
expectedMetrics, nodes := testNodeV1beta1MetricsData()
@@ -494,3 +636,175 @@ func TestTopNodeWithLabelSelectorMetricsFromMetricsServer(t *testing.T) {
}
}
}
func TestTopNodeWithSortByCpuMetricsFromMetricsServer(t *testing.T) {
cmdtesting.InitTestErrorHandler(t)
metrics, nodes := testNodeV1beta1MetricsData()
expectedMetrics := &metricsv1beta1api.NodeMetricsList{
ListMeta: metrics.ListMeta,
Items: metrics.Items[:],
}
expectedNodes := v1.NodeList{
ListMeta: nodes.ListMeta,
Items: nodes.Items[:],
}
expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)
expectedNodesNames := []string{"node2", "node3", "node1"}
tf := cmdtesting.NewTestFactory().WithNamespace("test")
defer tf.Cleanup()
codec := scheme.Codecs.LegacyCodec(scheme.Scheme.PrioritizedVersionsAllGroups()...)
ns := scheme.Codecs
tf.Client = &fake.RESTClient{
NegotiatedSerializer: ns,
Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
switch p, m := req.URL.Path, req.Method; {
case p == "/api":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
case p == "/apis":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbodyWithMetrics)))}, nil
case p == expectedNodePath && m == "GET":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: cmdtesting.ObjBody(codec, &expectedNodes)}, nil
default:
t.Fatalf("unexpected request: %#v\nGot URL: %#v\n", req, req.URL)
return nil, nil
}
}),
}
fakemetricsClientset := &metricsfake.Clientset{}
fakemetricsClientset.AddReactor("list", "nodes", func(action core.Action) (handled bool, ret runtime.Object, err error) {
return true, expectedMetrics, nil
})
tf.ClientConfigVal = cmdtesting.DefaultClientConfig()
streams, _, buf, _ := genericclioptions.NewTestIOStreams()
cmd := NewCmdTopNode(tf, nil, streams)
cmd.Flags().Set("sort-by", "cpu")
// TODO in the long run, we want to test most of our commands like this. Wire the options struct with specific mocks
// TODO then check the particular Run functionality and harvest results from fake clients
cmdOptions := &TopNodeOptions{
IOStreams: streams,
SortBy: "cpu",
}
if err := cmdOptions.Complete(tf, cmd, []string{}); err != nil {
t.Fatal(err)
}
cmdOptions.MetricsClient = fakemetricsClientset
if err := cmdOptions.Validate(); err != nil {
t.Fatal(err)
}
if err := cmdOptions.RunTopNode(); err != nil {
t.Fatal(err)
}
// Check the presence of node names in the output.
result := buf.String()
for _, m := range expectedMetrics.Items {
if !strings.Contains(result, m.Name) {
t.Errorf("missing metrics for %s: \n%s", m.Name, result)
}
}
resultLines := strings.Split(result, "\n")
resultNodes := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultNodes[i] = lineFirstColumn
}
if !reflect.DeepEqual(resultNodes, expectedNodesNames) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", expectedNodesNames, resultNodes)
}
}
func TestTopNodeWithSortByMemoryMetricsFromMetricsServer(t *testing.T) {
cmdtesting.InitTestErrorHandler(t)
metrics, nodes := testNodeV1beta1MetricsData()
expectedMetrics := &metricsv1beta1api.NodeMetricsList{
ListMeta: metrics.ListMeta,
Items: metrics.Items[:],
}
expectedNodes := v1.NodeList{
ListMeta: nodes.ListMeta,
Items: nodes.Items[:],
}
expectedNodePath := fmt.Sprintf("/%s/%s/nodes", apiPrefix, apiVersion)
expectedNodesNames := []string{"node2", "node3", "node1"}
tf := cmdtesting.NewTestFactory().WithNamespace("test")
defer tf.Cleanup()
codec := scheme.Codecs.LegacyCodec(scheme.Scheme.PrioritizedVersionsAllGroups()...)
ns := scheme.Codecs
tf.Client = &fake.RESTClient{
NegotiatedSerializer: ns,
Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
switch p, m := req.URL.Path, req.Method; {
case p == "/api":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apibody)))}, nil
case p == "/apis":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: ioutil.NopCloser(bytes.NewReader([]byte(apisbodyWithMetrics)))}, nil
case p == expectedNodePath && m == "GET":
return &http.Response{StatusCode: 200, Header: cmdtesting.DefaultHeader(), Body: cmdtesting.ObjBody(codec, &expectedNodes)}, nil
default:
t.Fatalf("unexpected request: %#v\nGot URL: %#v\n", req, req.URL)
return nil, nil
}
}),
}
fakemetricsClientset := &metricsfake.Clientset{}
fakemetricsClientset.AddReactor("list", "nodes", func(action core.Action) (handled bool, ret runtime.Object, err error) {
return true, expectedMetrics, nil
})
tf.ClientConfigVal = cmdtesting.DefaultClientConfig()
streams, _, buf, _ := genericclioptions.NewTestIOStreams()
cmd := NewCmdTopNode(tf, nil, streams)
cmd.Flags().Set("sort-by", "memory")
// TODO in the long run, we want to test most of our commands like this. Wire the options struct with specific mocks
// TODO then check the particular Run functionality and harvest results from fake clients
cmdOptions := &TopNodeOptions{
IOStreams: streams,
SortBy: "memory",
}
if err := cmdOptions.Complete(tf, cmd, []string{}); err != nil {
t.Fatal(err)
}
cmdOptions.MetricsClient = fakemetricsClientset
if err := cmdOptions.Validate(); err != nil {
t.Fatal(err)
}
if err := cmdOptions.RunTopNode(); err != nil {
t.Fatal(err)
}
// Check the presence of node names in the output.
result := buf.String()
for _, m := range expectedMetrics.Items {
if !strings.Contains(result, m.Name) {
t.Errorf("missing metrics for %s: \n%s", m.Name, result)
}
}
resultLines := strings.Split(result, "\n")
resultNodes := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultNodes[i] = lineFirstColumn
}
if !reflect.DeepEqual(resultNodes, expectedNodesNames) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", expectedNodesNames, resultNodes)
}
}
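The four node tests above and the pod tests in the next file repeat the same output-parsing block: split on newlines, drop the header and trailing empty line, keep the first column. A self-contained sketch of a helper that could factor this out; the name firstColumns and the sample output values are illustrative, not part of this commit. strings.Fields is used for robustness against repeated spaces, which is equivalent here to the tests' strings.Split(line, " ")[0]:

```go
package main

import (
	"fmt"
	"strings"
)

// firstColumns returns the first whitespace-separated field of every data row,
// skipping the header line and the trailing empty line produced by the final "\n".
func firstColumns(output string) []string {
	lines := strings.Split(output, "\n")
	if len(lines) < 3 { // need a header, at least one row, and the trailing empty line
		return nil
	}
	rows := lines[1 : len(lines)-1]
	names := make([]string, 0, len(rows))
	for _, line := range rows {
		names = append(names, strings.Fields(line)[0])
	}
	return names
}

func main() {
	out := "NAME    CPU(cores)   MEMORY(bytes)\n" +
		"node2   5m           6Mi\n" +
		"node3   3m           4Mi\n" +
		"node1   1m           2Mi\n"
	fmt.Println(firstColumns(out)) // [node2 node3 node1]
}
```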

View File

@@ -21,6 +21,7 @@ import (
"io/ioutil"
"net/http"
"net/url"
"reflect"
"strings"
"testing"
"time"
@@ -96,6 +97,7 @@ func TestTopPod(t *testing.T) {
args []string
expectedPath string
expectedQuery string
expectedPods []string
namespaces []string
containers bool
listsNamespaces bool
@@ -141,6 +143,20 @@ func TestTopPod(t *testing.T) {
namespaces: []string{testNS},
containers: true,
},
{
name: "pod sort by cpu",
flags: map[string]string{"sort-by": "cpu"},
expectedPath: topPathPrefix + "/namespaces/" + testNS + "/pods",
expectedPods: []string{"pod2", "pod3", "pod1"},
namespaces: []string{testNS, testNS, testNS},
},
{
name: "pod sort by memory",
flags: map[string]string{"sort-by": "memory"},
expectedPath: topPathPrefix + "/namespaces/" + testNS + "/pods",
expectedPods: []string{"pod2", "pod3", "pod1"},
namespaces: []string{testNS, testNS, testNS},
},
}
cmdtesting.InitTestErrorHandler(t)
for _, testCase := range testCases {
@@ -210,6 +226,7 @@ func TestTopPod(t *testing.T) {
// Check the presence of pod names&namespaces/container names in the output.
result := buf.String()
if testCase.containers {
for _, containerName := range expectedContainerNames {
if !strings.Contains(result, containerName) {
@@ -233,6 +250,19 @@ func TestTopPod(t *testing.T) {
if cmdutil.GetFlagBool(cmd, "no-headers") && strings.Contains(result, "MEMORY") {
t.Errorf("%s: unexpected headers with no-headers option set: \n%s", testCase.name, result)
}
if cmdutil.GetFlagString(cmd, "sort-by") == "cpu" || cmdutil.GetFlagString(cmd, "sort-by") == "memory" {
resultLines := strings.Split(result, "\n")
resultPods := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultPods[i] = lineFirstColumn
}
if !reflect.DeepEqual(testCase.expectedPods, resultPods) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", testCase.expectedPods, resultPods)
}
}
})
}
}
@@ -246,6 +276,7 @@ func TestTopPodWithMetricsServer(t *testing.T) {
args []string
expectedPath string
expectedQuery string
expectedPods []string
namespaces []string
containers bool
listsNamespaces bool
@@ -283,6 +314,20 @@ func TestTopPodWithMetricsServer(t *testing.T) {
namespaces: []string{testNS},
containers: true,
},
{
name: "pod sort by cpu",
options: &TopPodOptions{SortBy: "cpu"},
expectedPath: topPathPrefix + "/namespaces/" + testNS + "/pods",
expectedPods: []string{"pod2", "pod3", "pod1"},
namespaces: []string{testNS, testNS, testNS},
},
{
name: "pod sort by memory",
options: &TopPodOptions{SortBy: "memory"},
expectedPath: topPathPrefix + "/namespaces/" + testNS + "/pods",
expectedPods: []string{"pod2", "pod3", "pod1"},
namespaces: []string{testNS, testNS, testNS},
},
}
cmdtesting.InitTestErrorHandler(t)
for _, testCase := range testCases {
@@ -387,6 +432,19 @@ func TestTopPodWithMetricsServer(t *testing.T) {
t.Errorf("unexpected metrics for %s: \n%s", name, result)
}
}
if cmdutil.GetFlagString(cmd, "sort-by") == "cpu" || cmdutil.GetFlagString(cmd, "sort-by") == "memory" {
resultLines := strings.Split(result, "\n")
resultPods := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultPods[i] = lineFirstColumn
}
if !reflect.DeepEqual(testCase.expectedPods, resultPods) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", testCase.expectedPods, resultPods)
}
}
})
}
}
@@ -455,6 +513,7 @@ func TestTopPodCustomDefaults(t *testing.T) {
args []string
expectedPath string
expectedQuery string
expectedPods []string
namespaces []string
containers bool
listsNamespaces bool
@@ -492,6 +551,20 @@ func TestTopPodCustomDefaults(t *testing.T) {
namespaces: []string{testNS},
containers: true,
},
{
name: "pod sort by cpu",
flags: map[string]string{"sort-by": "cpu"},
expectedPath: customTopPathPrefix + "/namespaces/" + testNS + "/pods",
expectedPods: []string{"pod2", "pod3", "pod1"},
namespaces: []string{testNS, testNS, testNS},
},
{
name: "pod sort by memory",
flags: map[string]string{"sort-by": "memory"},
expectedPath: customTopPathPrefix + "/namespaces/" + testNS + "/pods",
expectedPods: []string{"pod2", "pod3", "pod1"},
namespaces: []string{testNS, testNS, testNS},
},
}
cmdtesting.InitTestErrorHandler(t)
for _, testCase := range testCases {
@@ -590,6 +663,19 @@ func TestTopPodCustomDefaults(t *testing.T) {
t.Errorf("%s: unexpected metrics for %s: \n%s", testCase.name, name, result)
}
}
if cmdutil.GetFlagString(cmd, "sort-by") == "cpu" || cmdutil.GetFlagString(cmd, "sort-by") == "memory" {
resultLines := strings.Split(result, "\n")
resultPods := make([]string, len(resultLines)-2) // don't process first (header) and last (empty) line
for i, line := range resultLines[1 : len(resultLines)-1] { // don't process first (header) and last (empty) line
lineFirstColumn := strings.Split(line, " ")[0]
resultPods[i] = lineFirstColumn
}
if !reflect.DeepEqual(testCase.expectedPods, resultPods) {
t.Errorf("kinds not matching:\n\texpectedKinds: %v\n\tgotKinds: %v\n", testCase.expectedPods, resultPods)
}
}
})
}
}

View File

@@ -141,6 +141,15 @@ func testNodeV1beta1MetricsData() (*metricsv1beta1api.NodeMetricsList, *v1.NodeL
v1.ResourceStorage: *resource.NewQuantity(7*(1024*1024), resource.DecimalSI),
},
},
{
ObjectMeta: metav1.ObjectMeta{Name: "node3", ResourceVersion: "11"},
Window: metav1.Duration{Duration: time.Minute},
Usage: v1.ResourceList{
v1.ResourceCPU: *resource.NewMilliQuantity(3, resource.DecimalSI),
v1.ResourceMemory: *resource.NewQuantity(4*(1024*1024), resource.DecimalSI),
v1.ResourceStorage: *resource.NewQuantity(5*(1024*1024), resource.DecimalSI),
},
},
},
}
nodes := &v1.NodeList{
@@ -168,6 +177,16 @@ func testNodeV1beta1MetricsData() (*metricsv1beta1api.NodeMetricsList, *v1.NodeL
},
},
},
{
ObjectMeta: metav1.ObjectMeta{Name: "node3", ResourceVersion: "11"},
Status: v1.NodeStatus{
Allocatable: v1.ResourceList{
v1.ResourceCPU: *resource.NewMilliQuantity(30, resource.DecimalSI),
v1.ResourceMemory: *resource.NewQuantity(40*(1024*1024), resource.DecimalSI),
v1.ResourceStorage: *resource.NewQuantity(50*(1024*1024), resource.DecimalSI),
},
},
},
},
}
return metrics, nodes

View File

@@ -64,6 +64,7 @@ func (n *NodeMetricsSorter) Len() int {
func (n *NodeMetricsSorter) Swap(i, j int) {
n.metrics[i], n.metrics[j] = n.metrics[j], n.metrics[i]
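// swap usages in lockstep with metrics so Less keeps reading the usage that belongs to each node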
n.usages[i], n.usages[j] = n.usages[j], n.usages[i]
}
func (n *NodeMetricsSorter) Less(i, j int) bool {
@@ -71,7 +72,7 @@ func (n *NodeMetricsSorter) Less(i, j int) bool {
case "cpu":
qi := n.usages[i][v1.ResourceCPU]
qj := n.usages[j][v1.ResourceCPU]
return qi.Value() > qj.Value()
return qi.MilliValue() > qj.MilliValue()
case "memory":
qi := n.usages[i][v1.ResourceMemory]
qj := n.usages[j][v1.ResourceMemory]
@@ -109,6 +110,7 @@ func (p *PodMetricsSorter) Len() int {
func (p *PodMetricsSorter) Swap(i, j int) {
p.metrics[i], p.metrics[j] = p.metrics[j], p.metrics[i]
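// podMetrics must be swapped together with metrics, mirroring NodeMetricsSorter.Swap above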
p.podMetrics[i], p.podMetrics[j] = p.podMetrics[j], p.podMetrics[i]
}
func (p *PodMetricsSorter) Less(i, j int) bool {
@@ -116,7 +118,7 @@ func (p *PodMetricsSorter) Less(i, j int) bool {
case "cpu":
qi := p.podMetrics[i][v1.ResourceCPU]
qj := p.podMetrics[j][v1.ResourceCPU]
return qi.Value() > qj.Value()
return qi.MilliValue() > qj.MilliValue()
case "memory":
qi := p.podMetrics[i][v1.ResourceMemory]
qj := p.podMetrics[j][v1.ResourceMemory]
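
With MilliValue in the comparators, a plain descending sort distinguishes sub-core usages. A self-contained sketch in the same shape as NodeMetricsSorter; the byCPU type and the node usages are illustrative, not code from this commit:

```go
package main

import (
	"fmt"
	"sort"

	v1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
)

// byCPU sorts node names and their usages together in descending CPU order,
// mirroring NodeMetricsSorter: Swap keeps both slices in lockstep and Less
// compares millicores so sub-core usages stay distinguishable.
type byCPU struct {
	names  []string
	usages []v1.ResourceList
}

func (b byCPU) Len() int { return len(b.names) }

func (b byCPU) Swap(i, j int) {
	b.names[i], b.names[j] = b.names[j], b.names[i]
	b.usages[i], b.usages[j] = b.usages[j], b.usages[i]
}

func (b byCPU) Less(i, j int) bool {
	qi := b.usages[i][v1.ResourceCPU]
	qj := b.usages[j][v1.ResourceCPU]
	return qi.MilliValue() > qj.MilliValue()
}

func main() {
	s := byCPU{
		names: []string{"node1", "node2", "node3"},
		usages: []v1.ResourceList{
			{v1.ResourceCPU: resource.MustParse("1m")},
			{v1.ResourceCPU: resource.MustParse("5m")},
			{v1.ResourceCPU: resource.MustParse("3m")},
		},
	}
	sort.Sort(s)
	fmt.Println(s.names) // [node2 node3 node1]
}
```

With Value() in Less, all three usages would collapse to 1 and the resulting order would be arbitrary; with MilliValue() the output is deterministically node2, node3, node1, matching expectedNodes in the tests above.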