From 7a6b4a9215e8a542b1297ea0022873d13f15d1d3 Mon Sep 17 00:00:00 2001
From: Patrick Ohly
Date: Thu, 25 May 2023 15:01:05 +0200
Subject: [PATCH] dra scheduler plugin test: fix loopvar bug and "reserve" expected data

The `listAll` function returned a slice where all pointers referred to the
same instance. That instance had the value of the last list entry. As a
result, unit tests only compared that element.

During the reserve phase, the first claim gets reserved in two test cases.
Those two tests must expect that change. That hadn't been noticed before
because that first claim didn't get compared.
---
 .../dynamicresources/dynamicresources_test.go | 30 ++++++++++++++++++-
 1 file changed, 29 insertions(+), 1 deletion(-)

diff --git a/pkg/scheduler/framework/plugins/dynamicresources/dynamicresources_test.go b/pkg/scheduler/framework/plugins/dynamicresources/dynamicresources_test.go
index 5d09769aa39..32f070422eb 100644
--- a/pkg/scheduler/framework/plugins/dynamicresources/dynamicresources_test.go
+++ b/pkg/scheduler/framework/plugins/dynamicresources/dynamicresources_test.go
@@ -104,7 +104,7 @@ var (
 			Obj()
 	inUseClaim = st.FromResourceClaim(pendingImmediateClaim).
 			Allocation(&resourcev1alpha2.AllocationResult{}).
-			ReservedFor(resourcev1alpha2.ResourceClaimConsumerReference{UID: types.UID(podUID)}).
+			ReservedFor(resourcev1alpha2.ResourceClaimConsumerReference{Resource: "pods", Name: podName, UID: types.UID(podUID)}).
 			Obj()
 	allocatedClaim = st.FromResourceClaim(pendingDelayedClaim).
 			OwnerReference(podName, podUID, podKind).
@@ -214,10 +214,36 @@ func TestPlugin(t *testing.T) {
 		"claim-reference": {
 			pod:    podWithClaimName,
 			claims: []*resourcev1alpha2.ResourceClaim{allocatedClaim, otherClaim},
+			want: want{
+				reserve: result{
+					changes: change{
+						claim: func(claim *resourcev1alpha2.ResourceClaim) *resourcev1alpha2.ResourceClaim {
+							if claim.Name == claimName {
+								claim = claim.DeepCopy()
+								claim.Status.ReservedFor = inUseClaim.Status.ReservedFor
+							}
+							return claim
+						},
+					},
+				},
+			},
 		},
 		"claim-template": {
 			pod:    podWithClaimTemplate,
 			claims: []*resourcev1alpha2.ResourceClaim{allocatedClaim, otherClaim},
+			want: want{
+				reserve: result{
+					changes: change{
+						claim: func(claim *resourcev1alpha2.ResourceClaim) *resourcev1alpha2.ResourceClaim {
+							if claim.Name == claimName {
+								claim = claim.DeepCopy()
+								claim.Status.ReservedFor = inUseClaim.Status.ReservedFor
+							}
+							return claim
+						},
+					},
+				},
+			},
 		},
 		"missing-claim": {
 			pod: podWithClaimTemplate,
@@ -589,11 +615,13 @@ func (tc *testContext) listAll(t *testing.T) (objects []metav1.Object) {
 	claims, err := tc.client.ResourceV1alpha2().ResourceClaims("").List(tc.ctx, metav1.ListOptions{})
 	require.NoError(t, err, "list claims")
 	for _, claim := range claims.Items {
+		claim := claim
 		objects = append(objects, &claim)
 	}
 	schedulings, err := tc.client.ResourceV1alpha2().PodSchedulingContexts("").List(tc.ctx, metav1.ListOptions{})
 	require.NoError(t, err, "list pod scheduling")
 	for _, scheduling := range schedulings.Items {
+		scheduling := scheduling
 		objects = append(objects, &scheduling)
 	}
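
Note (not part of the patch): below is a minimal, self-contained sketch of the
loop-variable aliasing bug the commit message describes, using a hypothetical
Item type in place of the real ResourceClaim and PodSchedulingContext objects.
With Go versions before 1.22, a range loop reuses a single loop variable, so
taking its address on every iteration yields pointers that all refer to one
instance holding the last element; the `x := x` re-declaration added in
`listAll` gives each iteration its own copy.

package main

import "fmt"

// Item is a hypothetical stand-in type for the API objects listed by listAll.
type Item struct{ Name string }

func main() {
	items := []Item{{Name: "claim-1"}, {Name: "claim-2"}, {Name: "claim-3"}}

	// Buggy pattern (pre-Go 1.22 semantics): &item always points at the
	// single, reused loop variable, so every appended pointer aliases the
	// same instance, which holds the last entry once the loop finishes.
	var buggy []*Item
	for _, item := range items {
		buggy = append(buggy, &item)
	}
	fmt.Println(buggy[0].Name, buggy[1].Name, buggy[2].Name) // claim-3 claim-3 claim-3

	// Fix used in the patch: shadow the loop variable with a per-iteration
	// copy before taking its address.
	var fixed []*Item
	for _, item := range items {
		item := item
		fixed = append(fixed, &item)
	}
	fmt.Println(fixed[0].Name, fixed[1].Name, fixed[2].Name) // claim-1 claim-2 claim-3
}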