Prevent scaling up stacks without traffic (#542)
* Add e2e test for HPA deletion at downscaling

  - fix the updated parameter list of the test factory
  - adapt the test cases to the new struct field

* turn on inactive HPA deletion to pass the tests

* drop the feature flag to simplify the code

* extend e2e test to include scale down of first stack

---------

Co-authored-by: Martin Linkhorst <[email protected]>
miladbarazandeh and linki authored Mar 12, 2024
1 parent 0acab4a commit 8e7f5b0
Showing 3 changed files with 112 additions and 4 deletions.
79 changes: 79 additions & 0 deletions cmd/e2e/generated_autoscaler_test.go
@@ -3,6 +3,7 @@ package main
 import (
     "fmt"
     "testing"
+    "time"
 
     "github.com/stretchr/testify/require"
     zv1 "github.com/zalando-incubator/stackset-controller/pkg/apis/zalando.org/v1"
@@ -75,3 +76,81 @@ func TestGenerateAutoscaler(t *testing.T) {
         }
     }
 }
+
+func TestAutoscalerWithoutTraffic(t *testing.T) {
+    t.Parallel()
+
+    // Create a stackset with two stacks and an autoscaler for each stack
+    stacksetName := "autoscaler-without-traffic"
+    metrics := []zv1.AutoscalerMetrics{
+        makeCPUAutoscalerMetrics(50),
+    }
+    factory := NewTestStacksetSpecFactory(stacksetName).Ingress().Autoscaler(1, 3, metrics).StackGC(1, 30)
+    firstStack := "v1"
+    fullFirstStack := fmt.Sprintf("%s-%s", stacksetName, firstStack)
+    spec := factory.Create(t, firstStack)
+    err := createStackSet(stacksetName, 0, spec)
+    require.NoError(t, err)
+    _, err = waitForStack(t, stacksetName, firstStack)
+    require.NoError(t, err)
+    _, err = waitForHPA(t, fullFirstStack)
+    require.NoError(t, err)
+
+    secondStack := "v2"
+    fullSecondStack := fmt.Sprintf("%s-%s", stacksetName, secondStack)
+    spec = factory.Create(t, secondStack)
+    err = updateStackset(stacksetName, spec)
+    require.NoError(t, err)
+    _, err = waitForStack(t, stacksetName, secondStack)
+    require.NoError(t, err)
+    _, err = waitForHPA(t, fullSecondStack)
+    require.NoError(t, err)
+
+    // Switch traffic 100% to the first stack
+    desiredTraffic := map[string]float64{
+        fullFirstStack:  100,
+        fullSecondStack: 0,
+    }
+    err = setDesiredTrafficWeightsStackset(stacksetName, desiredTraffic)
+    require.NoError(t, err)
+    err = trafficWeightsUpdatedStackset(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 1).await()
+    require.NoError(t, err)
+
+    // ensure that the HPA for the first stack is still there and that the HPA for the second stack is deleted
+    err = resourceDeleted(t, "hpa", fullSecondStack, hpaInterface()).withTimeout(time.Minute * 1).await()
+    require.NoError(t, err)
+    _, err = waitForHPA(t, fullFirstStack)
+    require.NoError(t, err)
+
+    // Switch traffic 50% to each stack
+    desiredTraffic = map[string]float64{
+        fullFirstStack:  50,
+        fullSecondStack: 50,
+    }
+    err = setDesiredTrafficWeightsStackset(stacksetName, desiredTraffic)
+    require.NoError(t, err)
+    err = trafficWeightsUpdatedStackset(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 1).await()
+    require.NoError(t, err)
+
+    // ensure that the HPAs for both stacks are still there
+    _, err = waitForHPA(t, fullFirstStack)
+    require.NoError(t, err)
+    _, err = waitForHPA(t, fullSecondStack)
+    require.NoError(t, err)
+
+    // Switch traffic 100% to the second stack
+    desiredTraffic = map[string]float64{
+        fullFirstStack:  0,
+        fullSecondStack: 100,
+    }
+    err = setDesiredTrafficWeightsStackset(stacksetName, desiredTraffic)
+    require.NoError(t, err)
+    err = trafficWeightsUpdatedStackset(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 1).await()
+    require.NoError(t, err)
+
+    // ensure that the HPA for the first stack is deleted and that the HPA for the second stack is still there
+    err = resourceDeleted(t, "hpa", fullFirstStack, hpaInterface()).withTimeout(time.Minute * 1).await()
+    require.NoError(t, err)
+    _, err = waitForHPA(t, fullSecondStack)
+    require.NoError(t, err)
+}
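
The assertions above go through fluent polling helpers from the e2e package (resourceDeleted, waitForHPA, and the withTimeout(...).await() chain). Their implementation is not part of this diff; the sketch below shows how such an awaiter is commonly structured. The type, field, and function names here are illustrative assumptions, not the repository's actual helpers.

package e2e

import (
    "errors"
    "time"
)

// awaiter is a hypothetical reconstruction of the fluent polling helper
// used above via withTimeout(...).await(); the real e2e helpers in the
// repository may differ in names and details.
type awaiter struct {
    description string
    timeout     time.Duration
    poll        func() (bool, error) // reports whether the condition holds yet
}

// withTimeout sets the polling deadline and returns the awaiter for chaining.
func (a *awaiter) withTimeout(timeout time.Duration) *awaiter {
    a.timeout = timeout
    return a
}

// await polls the condition until it holds, an error occurs, or the
// timeout elapses.
func (a *awaiter) await() error {
    deadline := time.Now().Add(a.timeout)
    for time.Now().Before(deadline) {
        done, err := a.poll()
        if err != nil {
            return err
        }
        if done {
            return nil
        }
        time.Sleep(time.Second)
    }
    return errors.New("timed out waiting for " + a.description)
}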
4 changes: 4 additions & 0 deletions pkg/core/stack_resources.go
@@ -253,6 +253,10 @@ func (sc *StackContainer) generateHPA(toSegment bool) (
         return nil, nil
     }
 
+    if sc.ScaledDown() {
+        return nil, nil
+    }
+
     result := &autoscaling.HorizontalPodAutoscaler{
         ObjectMeta: sc.resourceMeta(),
         TypeMeta: metav1.TypeMeta{
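
This is the core of the fix: when sc.ScaledDown() is true, generateHPA emits no HPA at all, so a stack that has been without traffic past its scale-down TTL cannot be scaled back up by the autoscaler. The commit does not include ScaledDown() itself; judging from the noTrafficSince and scaledownTTL fields exercised by the unit test below, the check plausibly reduces to this minimal sketch (names and logic are assumptions, not the controller's verbatim source):

package core

import "time"

// stackState mirrors the two StackContainer fields the unit test below
// populates; the real StackContainer carries many more.
type stackState struct {
    noTrafficSince time.Time     // when traffic to the stack last dropped to zero
    scaledownTTL   time.Duration // grace period before the stack counts as scaled down
}

// scaledDown sketches the condition generateHPA consults: a stack is
// scaled down once it has gone without traffic longer than its TTL.
func (s stackState) scaledDown() bool {
    if s.noTrafficSince.IsZero() {
        // Traffic never stopped (or its stop time was never recorded),
        // so the stack keeps its HPA.
        return false
    }
    return time.Since(s.noTrafficSince) > s.scaledownTTL
}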
33 changes: 29 additions & 4 deletions pkg/core/stack_resources_test.go
@@ -1349,6 +1349,7 @@ func TestGenerateHPA(t *testing.T) {
         autoscaler          *zv1.Autoscaler
         expectedMinReplicas *int32
         expectedMaxReplicas int32
+        noTrafficSince      time.Time
         expectedMetrics     []autoscaling.MetricSpec
         expectedBehavior    *autoscaling.HorizontalPodAutoscalerBehavior
     }{
@@ -1382,6 +1383,24 @@ func TestGenerateHPA(t *testing.T) {
             },
             expectedBehavior: exampleBehavior,
         },
+        {
+            name: "HPA when stack scaled down",
+            autoscaler: &zv1.Autoscaler{
+                MinReplicas: &min,
+                MaxReplicas: max,
+
+                Metrics: []zv1.AutoscalerMetrics{
+                    {
+                        Type:               zv1.CPUAutoscalerMetric,
+                        AverageUtilization: &utilization,
+                    },
+                },
+                Behavior: exampleBehavior,
+            },
+            noTrafficSince:   time.Now().Add(-time.Hour),
+            expectedMetrics:  nil,
+            expectedBehavior: nil,
+        },
     } {
         t.Run(tc.name, func(t *testing.T) {
             podTemplate := zv1.PodTemplateSpec{
@@ -1409,14 +1428,20 @@ func TestGenerateHPA(t *testing.T) {
                         },
                     },
                 },
+                noTrafficSince: tc.noTrafficSince,
+                scaledownTTL:   time.Minute,
             }
 
             hpa, err := autoscalerContainer.GenerateHPA()
             require.NoError(t, err)
-            require.Equal(t, tc.expectedMinReplicas, hpa.Spec.MinReplicas)
-            require.Equal(t, tc.expectedMaxReplicas, hpa.Spec.MaxReplicas)
-            require.Equal(t, tc.expectedMetrics, hpa.Spec.Metrics)
-            require.Equal(t, tc.expectedBehavior, hpa.Spec.Behavior)
+            if tc.expectedBehavior == nil {
+                require.Nil(t, hpa)
+            } else {
+                require.Equal(t, tc.expectedMinReplicas, hpa.Spec.MinReplicas)
+                require.Equal(t, tc.expectedMaxReplicas, hpa.Spec.MaxReplicas)
+                require.Equal(t, tc.expectedMetrics, hpa.Spec.Metrics)
+                require.Equal(t, tc.expectedBehavior, hpa.Spec.Behavior)
+            }
         })
     }
 }
