diff --git a/tests/results/dp-perf/2.1.0/2.1.0-oss.md b/tests/results/dp-perf/2.1.0/2.1.0-oss.md new file mode 100644 index 0000000000..3f2df54190 --- /dev/null +++ b/tests/results/dp-perf/2.1.0/2.1.0-oss.md @@ -0,0 +1,90 @@ +# Results + +## Test environment + +NGINX Plus: false + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Average latency slightly increased across all routing methods, saw the same trend from 1.6 -> 2.0. + +## Test1: Running latte path based routing + +```text +Requests [total, rate, throughput] 30000, 1000.01, 999.98 +Duration [total, attack, wait] 30.001s, 30s, 860.144µs +Latencies [min, mean, 50, 90, 95, 99, max] 708.68µs, 924.81µs, 900.213µs, 1.012ms, 1.056ms, 1.235ms, 25.908ms +Bytes In [total, mean] 4800000, 160.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test2: Running coffee header based routing + +```text +Requests [total, rate, throughput] 30000, 1000.01, 999.98 +Duration [total, attack, wait] 30.001s, 30s, 916.828µs +Latencies [min, mean, 50, 90, 95, 99, max] 751.086µs, 950.524µs, 929.692µs, 1.031ms, 1.073ms, 1.246ms, 21.708ms +Bytes In [total, mean] 4830000, 161.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test3: Running coffee query based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 910.096µs +Latencies [min, mean, 50, 90, 95, 99, max] 737.72µs, 952.101µs, 931.79µs, 1.04ms, 1.084ms, 1.257ms, 21.421ms +Bytes In [total, mean] 5070000, 169.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test4: Running tea GET method based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.00 +Duration [total, attack, wait] 30s, 29.999s, 953.01µs +Latencies [min, mean, 50, 90, 95, 99, max] 735.071µs, 943.252µs, 913.494µs, 1.019ms, 1.065ms, 1.251ms, 22.035ms +Bytes In [total, mean] 4740000, 158.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test5: Running tea POST method based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 915.921µs +Latencies [min, mean, 50, 90, 95, 99, max] 734.844µs, 941.777µs, 917.299µs, 1.026ms, 1.069ms, 1.23ms, 16.245ms +Bytes In [total, mean] 4740000, 158.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` diff --git a/tests/results/dp-perf/2.1.0/2.1.0-plus.md b/tests/results/dp-perf/2.1.0/2.1.0-plus.md new file mode 100644 index 0000000000..d7518e2927 --- /dev/null +++ b/tests/results/dp-perf/2.1.0/2.1.0-plus.md @@ -0,0 +1,90 @@ +# Results + +## Test environment + +NGINX Plus: true + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- 
Average latency slightly decreased across all routing methods. + +## Test1: Running latte path based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 864.169µs +Latencies [min, mean, 50, 90, 95, 99, max] 676.161µs, 888.566µs, 842.896µs, 960.735µs, 1.026ms, 1.385ms, 35.079ms +Bytes In [total, mean] 4770000, 159.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test2: Running coffee header based routing + +```text +Requests [total, rate, throughput] 30000, 1000.03, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 789.743µs +Latencies [min, mean, 50, 90, 95, 99, max] 696.868µs, 888.515µs, 867.086µs, 957.192µs, 1.004ms, 1.322ms, 20.014ms +Bytes In [total, mean] 4800000, 160.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test3: Running coffee query based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 825.991µs +Latencies [min, mean, 50, 90, 95, 99, max] 696.245µs, 901.93µs, 874.173µs, 978.297µs, 1.03ms, 1.272ms, 22.813ms +Bytes In [total, mean] 5040000, 168.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test4: Running tea GET method based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 845.195µs +Latencies [min, mean, 50, 90, 95, 99, max] 696.031µs, 906.906µs, 878.335µs, 989.976µs, 1.041ms, 1.267ms, 25.992ms +Bytes In [total, mean] 4710000, 157.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +## Test5: Running tea POST method based routing + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 947.993µs +Latencies [min, mean, 50, 90, 95, 99, max] 716.416µs, 908.156µs, 883.623µs, 996.843µs, 1.054ms, 1.289ms, 17.605ms +Bytes In [total, mean] 4710000, 157.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` diff --git a/tests/results/longevity/2.1.0/2.1.0-oss.md b/tests/results/longevity/2.1.0/2.1.0-oss.md new file mode 100644 index 0000000000..1d86f70ba8 --- /dev/null +++ b/tests/results/longevity/2.1.0/2.1.0-oss.md @@ -0,0 +1,321 @@ +# Results + +## Test environment + +NGINX Plus: false + +NGINX Gateway Fabric: + +- Commit: 00112a4f92025967d82b04fccc9df98b314e06b8 +- Date: 2025-08-08T19:16:31Z +- Dirty: false + +GKE Cluster: + +- Node count: 3 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 2 +- RAM per node: 4015676Ki +- Max pods per node: 110 +- Zone: us-central1-c +- Instance Type: e2-medium + +## Summary: + +- Overwhelming majority of traffic responses were non-2xx or 3xx responses. +- Increase in memory usage for both NGF and NGINX. +- CPU usage remained consistant with past results. +- Errors seen seem to be similar to the singular error in the 1.6.0 plus test run. 
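+
+The "overwhelming majority" bullet above can be sanity-checked against the wrk totals reported in the Traffic section below. A minimal sketch (the totals are copied from this report; the script itself is only illustrative):
+
+```python
+# Share of non-2xx/3xx responses, using the wrk totals from the Traffic section below.
+runs = {
+    "HTTP /coffee": (340_520_035, 307_049_999),  # (total requests, non-2xx or 3xx responses)
+    "HTTPS /tea": (315_478_863, 269_078_873),
+}
+
+for name, (total, bad) in runs.items():
+    print(f"{name}: {bad / total:.1%} non-2xx/3xx")  # roughly 90.2% and 85.3%
+```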
+ +## Traffic + +HTTP: + +```text +Running 5760m test @ http://cafe.example.com/coffee + 2 threads and 100 connections + Thread Stats Avg Stdev Max +/- Stdev + Latency 99.75ms 99.46ms 2.00s 86.19% + Req/Sec 503.99 399.94 5.27k 72.34% + 340520035 requests in 5760.00m, 97.24GB read + Socket errors: connect 0, read 52470, write 0, timeout 74144 + Non-2xx or 3xx responses: 307049999 +Requests/sec: 985.30 +Transfer/sec: 295.05KB +``` + +HTTPS: + +```text +Running 5760m test @ https://cafe.example.com/tea + 2 threads and 100 connections + Thread Stats Avg Stdev Max +/- Stdev + Latency 105.85ms 99.39ms 1.98s 83.25% + Req/Sec 467.11 378.48 5.08k 73.91% + 315478863 requests in 5760.00m, 90.82GB read + Socket errors: connect 0, read 51813, write 0, timeout 69436 + Non-2xx or 3xx responses: 269078873 +Requests/sec: 912.84 +Transfer/sec: 275.55KB +``` + +## Key Metrics + +### Containers memory + +![oss-memory.png](oss-memory.png) + +### NGF Container Memory + +![oss-ngf-memory.png](oss-ngf-memory.png) + +### Containers CPU + +![oss-cpu.png](oss-cpu.png) + + +## Error Logs + +### nginx-gateway + +Logs have been reduced with some information and unique logs kept. However most were the "failed to list" or "cannot watch resource" + +error=failed to list *v1.ServiceAccount: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T21:16:49Z;type=*v1.ServiceAccount +error=failed to list *v1.Service: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + 
pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T21:16:48Z;type=*v1.Service +error=failed to list *v1.ConfigMap: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +error=failed to list *v1.HTTPRoute: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +error=failed to list *v1.GatewayClass: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +error=failed to list *v1.EndpointSlice: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +error=failed to list *v1.Secret: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +..... 
+ +error=leader election lost;level=error;msg=error received after stop sequence was engaged;stacktrace=sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).engageStopProcedure.func1 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:512;ts=2025-08-12T21:16:30Z +level=error;msg=error retrieving resource lock nginx-gateway/ngf-longevity-nginx-gateway-fabric-leader-election: context deadline exceeded;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:436 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1.1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:285 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext.func2 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:87 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:88 +k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/poll.go:48 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:283 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.JitterUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:210 +k8s.io/apimachinery/pkg/util/wait.Until + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:163 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:282 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).Run + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:221 +sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).Start.func3 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:449;ts=2025-08-12T21:16:30Z +level=error;msg=Failed to update lock optimistically: context deadline exceeded, falling back to slow path;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:429 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1.1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:285 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext.func2 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:87 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:88 +k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/poll.go:48 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:283 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 
+k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.JitterUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:210 +k8s.io/apimachinery/pkg/util/wait.Until + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:163 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:282 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).Run + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:221 +sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).Start.func3 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:449;ts=2025-08-12T21:16:30Z +.. +error=serviceaccounts is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot watch resource "serviceaccounts" in API group "" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T21:16:28Z;type=*v1.ServiceAccount +error=configmaps is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot watch resource "configmaps" in API group "" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 
+k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T21:16:28Z;type=*v1.ConfigMap +error=failed to list *v1.PartialObjectMetadata: customresourcedefinitions.apiextensions.k8s.io is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot list resource "customresourcedefinitions" in API group "apiextensions.k8s.io" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T21:16:28Z;type=*v1.PartialObjectMetadata +error=failed to list *v1.GRPCRoute: grpcroutes.gateway.networking.k8s.io is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot list resource "grpcroutes" in API group 
"gateway.networking.k8s.io" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +error=failed to list *v1.GatewayClass: gatewayclasses.gateway.networking.k8s.io is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot list resource "gatewayclasses" in API group "gateway.networking.k8s.io" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +level=error;msg=error retrieving resource lock nginx-gateway/ngf-longevity-nginx-gateway-fabric-leader-election: leases.coordination.k8s.io "ngf-longevity-nginx-gateway-fabric-leader-election" is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot get resource "leases" in API group "coordination.k8s.io" in the namespace "nginx-gateway";stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:436 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1.1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:285 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext.func2 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:87 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:88 +k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/poll.go:48 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:283 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.JitterUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:210 +k8s.io/apimachinery/pkg/util/wait.Until + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:163 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:282 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).Run + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:221 +sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).Start.func3 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:449;ts=2025-08-12T21:16:26Z +level=error;msg=Failed to update lock optimistically: leases.coordination.k8s.io "ngf-longevity-nginx-gateway-fabric-leader-election" is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot update resource "leases" in API group "coordination.k8s.io" in the namespace "nginx-gateway", falling back to slow 
path;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew +.. +error=gateways.gateway.networking.k8s.io is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot watch resource "gateways" in API group "gateway.networking.k8s.io" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. + +error=nginxproxies.gateway.nginx.org is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot watch resource "nginxproxies" in API group "gateway.nginx.org" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. + +### nginx + +Make command errored because there were so many errors in nginx-gateway that none were recorded if any. diff --git a/tests/results/longevity/2.1.0/2.1.0-plus.md b/tests/results/longevity/2.1.0/2.1.0-plus.md new file mode 100644 index 0000000000..0d6cb645b2 --- /dev/null +++ b/tests/results/longevity/2.1.0/2.1.0-plus.md @@ -0,0 +1,224 @@ +# Results + +## Test environment + +NGINX Plus: true + +NGINX Gateway Fabric: + +- Commit: 00112a4f92025967d82b04fccc9df98b314e06b8 +- Date: 2025-08-08T19:16:31Z +- Dirty: false + +GKE Cluster: + +- Node count: 3 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 2 +- RAM per node: 4015668Ki +- Max pods per node: 110 +- Zone: us-central1-c +- Instance Type: e2-medium + +## Summary: + +- Results may be incomplete as traffic statistics weren't collected. This could be due to test run user error, or due to the plethora of errors which occurred. +- CPU usage remained consistant with past results. +- NGINX memory remained consistant with past results while NGF memory seemed to continually climb. +- Errors seen seem to be similar to the singular error in the 1.6.0 plus test run. + + +## Traffic + +HTTP: + +```text +``` + +HTTPS: + +```text +``` + +## Key Metrics + +### Containers memory + +![plus-memory.png](plus-memory.png) + +### NGF Container Memory + +![plus-ngf-memory.png](plus-ngf-memory.png) + +### Containers CPU + +![plus-cpu.png](plus-cpu.png) + + +## Error Logs + +### nginx-gateway + +Logs have been reduced with some information and unique logs kept. However most were the "failed to list" or "cannot watch resource". +Quite a few logs on lock issues. 
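+
+Purely as an illustration of the kind of reduction described above (not necessarily the method actually used), the structured log lines could be grouped on their `error`/`msg` fields to keep one representative per unique error; the field names follow the log format shown below, and the helper itself is hypothetical:
+
+```python
+# Illustrative only: count semicolon-separated key=value log lines by their
+# error (or msg) field so that only unique errors need to be kept.
+from collections import Counter
+
+def error_key(line: str) -> str:
+    fields = dict(part.split("=", 1) for part in line.split(";") if "=" in part)
+    return fields.get("error") or fields.get("msg", line)
+
+def reduce_logs(lines):
+    counts = Counter(error_key(l) for l in lines if "level=error" in l)
+    return counts.most_common()
+```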
+ +error=failed to list *v2.HorizontalPodAutoscaler: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T22:49:01Z;type=*v2.HorizontalPodAutoscaler +error=failed to list *v1.EndpointSlice: Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T22:48:59Z;type=*v1.EndpointSlice 
+.. +error=Unauthorized;event={'name': 'ngf-longevity-nginx-gateway-fabric-leader-election.185b269a5517109a', 'namespace': 'nginx-gateway'};level=error;msg=Server rejected event (will not retry!);stacktrace=k8s.io/client-go/tools/record.recordEvent + pkg/mod/k8s.io/client-go@v0.33.3/tools/record/event.go:359 +k8s.io/client-go/tools/record.(*eventBroadcasterImpl).recordToSink + pkg/mod/k8s.io/client-go@v0.33.3/tools/record/event.go:302 +k8s.io/client-go/tools/record.(*eventBroadcasterImpl).StartRecordingToSink.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/record/event.go:279 +k8s.io/client-go/tools/record.(*eventBroadcasterImpl).StartEventWatcher.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/record/event.go:418;ts=2025-08-12T22:48:44Z +error=leader election lost;level=error;msg=error received after stop sequence was engaged;stacktrace=sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).engageStopProcedure.func1 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:512;ts=2025-08-12T22:48:44Z +level=error;msg=error retrieving resource lock nginx-gateway/ngf-longevity-nginx-gateway-fabric-leader-election: context deadline exceeded;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:436 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1.1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:285 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext.func2 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:87 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:88 +k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/poll.go:48 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:283 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.JitterUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:210 +k8s.io/apimachinery/pkg/util/wait.Until + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:163 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:282 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).Run + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:221 +sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).Start.func3 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:449;ts=2025-08-12T22:48:44Z +level=error;msg=Failed to update lock optimistically: context deadline exceeded, falling back to slow path;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:429 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1.1 + 
pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:285 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext.func2 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:87 +k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/loop.go:88 +k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/poll.go:48 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:283 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.JitterUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:210 +k8s.io/apimachinery/pkg/util/wait.Until + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:163 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).renew + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:282 +k8s.io/client-go/tools/leaderelection.(*LeaderElector).Run + pkg/mod/k8s.io/client-go@v0.33.3/tools/leaderelection/leaderelection.go:221 +sigs.k8s.io/controller-runtime/pkg/manager.(*controllerManager).Start.func3 + pkg/mod/sigs.k8s.io/controller-runtime@v0.21.0/pkg/manager/internal.go:449;ts=2025-08-12T22:48:44Z +.. +level=error;msg=error retrieving resource lock nginx-gateway/ngf-longevity-nginx-gateway-fabric-leader-election: Unauthorized;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew +.. +level=error;msg=Failed to update lock optimistically: Unauthorized, falling back to slow path;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew +.. +.. +error=Unauthorized;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. +error=failed to list *v1alpha1.NginxGateway: nginxgateways.gateway.nginx.org is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot list resource "nginxgateways" in API group "gateway.nginx.org" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError +.. 
+error=failed to list *v1.PartialObjectMetadata: customresourcedefinitions.apiextensions.k8s.io is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot list resource "customresourcedefinitions" in API group "apiextensions.k8s.io" at the cluster scope;level=error;logger=controller-runtime.cache.UnhandledError;msg=Failed to watch;reflector=pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:285;stacktrace=k8s.io/apimachinery/pkg/util/runtime.logError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:226 +k8s.io/apimachinery/pkg/util/runtime.handleError + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:217 +k8s.io/apimachinery/pkg/util/runtime.HandleErrorWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/runtime/runtime.go:203 +k8s.io/client-go/tools/cache.DefaultWatchErrorHandler + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:200 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext.func1 + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:360 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:255 +k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:256 +k8s.io/apimachinery/pkg/util/wait.BackoffUntil + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/backoff.go:233 +k8s.io/client-go/tools/cache.(*Reflector).RunWithContext + pkg/mod/k8s.io/client-go@v0.33.3/tools/cache/reflector.go:358 +k8s.io/client-go/tools/cache.(*controller).RunWithContext.(*Group).StartWithContext.func3 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:63 +k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1 + pkg/mod/k8s.io/apimachinery@v0.33.3/pkg/util/wait/wait.go:72;ts=2025-08-12T22:48:34Z;type=*v1.PartialObjectMetadata +.. +level=error;msg=Failed to update lock optimistically: leases.coordination.k8s.io "ngf-longevity-nginx-gateway-fabric-leader-election" is forbidden: User "system:serviceaccount:nginx-gateway:ngf-longevity-nginx-gateway-fabric" cannot update resource "leases" in API group "coordination.k8s.io" in the namespace "nginx-gateway", falling back to slow path;stacktrace=k8s.io/client-go/tools/leaderelection.(*LeaderElector).tryAcquireOrRenew +.. + +### nginx + +Make command errored because there were so many errors in nginx-gateway that none were recorded if any. 
diff --git a/tests/results/longevity/2.1.0/oss-cpu.png b/tests/results/longevity/2.1.0/oss-cpu.png new file mode 100644 index 0000000000..6f3f0ad5f4 Binary files /dev/null and b/tests/results/longevity/2.1.0/oss-cpu.png differ diff --git a/tests/results/longevity/2.1.0/oss-memory.png b/tests/results/longevity/2.1.0/oss-memory.png new file mode 100644 index 0000000000..36994e292c Binary files /dev/null and b/tests/results/longevity/2.1.0/oss-memory.png differ diff --git a/tests/results/longevity/2.1.0/plus-cpu.png b/tests/results/longevity/2.1.0/plus-cpu.png new file mode 100644 index 0000000000..e33745854a Binary files /dev/null and b/tests/results/longevity/2.1.0/plus-cpu.png differ diff --git a/tests/results/longevity/2.1.0/plus-memory.png b/tests/results/longevity/2.1.0/plus-memory.png new file mode 100644 index 0000000000..5d6dc9b672 Binary files /dev/null and b/tests/results/longevity/2.1.0/plus-memory.png differ diff --git a/tests/results/ngf-upgrade/2.1.0/2.1.0-oss.md b/tests/results/ngf-upgrade/2.1.0/2.1.0-oss.md new file mode 100644 index 0000000000..15e38aa408 --- /dev/null +++ b/tests/results/ngf-upgrade/2.1.0/2.1.0-oss.md @@ -0,0 +1,60 @@ +# Results + +## Test environment + +NGINX Plus: false + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- This is the first test result since the 2.0 architecture change since you cannot upgrade from 1.6 -> 2.0. With this test setup, there is downtime when upgrading NGF. +- There is a manual workaround for this release which involves uninstalling then re-installing NGF, which should lead to no downtime on the data plane. Hopefully our new readiness probe will fix this issue in the next release. 
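+
+To put a rough number on that downtime using the vegeta results below (the counts are copied from the Status Codes lines in this report; the calculation is only an estimate):
+
+```python
+# vegeta attacked at ~100 req/s, so failed requests (status code 0) divided by
+# the rate roughly approximates how long the data plane was unreachable.
+rate = 100  # requests per second
+failed = {"https /tea": 3859, "http /coffee": 3865}  # status code 0 counts from the tests below
+
+for name, count in failed.items():
+    print(f"{name}: ~{count / rate:.0f}s of failed traffic during the upgrade")
+```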
+ +## Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 6000, 100.01, 25.74 +Duration [total, attack, wait] 1m23s, 59.992s, 23.188s +Latencies [min, mean, 50, 90, 95, 99, max] 374.346µs, 19.546s, 30s, 30.001s, 30.001s, 30.001s, 30.003s +Bytes In [total, mean] 333996, 55.67 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 35.68% +Status Codes [code:count] 0:3859 200:2141 +Error Set: +Get "https://cafe.example.com/tea": dial tcp 0.0.0.0:0->10.138.0.49:443: connect: connection refused +Get "https://cafe.example.com/tea": context deadline exceeded (Client.Timeout exceeded while awaiting headers) +``` + +![https-oss.png](https-oss.png) + +## Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 6000, 100.01, 25.60 +Duration [total, attack, wait] 1m23s, 59.991s, 23.397s +Latencies [min, mean, 50, 90, 95, 99, max] 369.068µs, 19.374s, 30s, 30.001s, 30.001s, 30.001s, 30.006s +Bytes In [total, mean] 346576, 57.76 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 35.58% +Status Codes [code:count] 0:3865 200:2135 +Error Set: +Get "http://cafe.example.com/coffee": dial tcp 0.0.0.0:0->10.138.0.49:80: connect: connection refused +Get "http://cafe.example.com/coffee": context deadline exceeded (Client.Timeout exceeded while awaiting headers) +``` + +![http-oss.png](http-oss.png) diff --git a/tests/results/ngf-upgrade/2.1.0/2.1.0-plus.md b/tests/results/ngf-upgrade/2.1.0/2.1.0-plus.md new file mode 100644 index 0000000000..5f58aa6e5c --- /dev/null +++ b/tests/results/ngf-upgrade/2.1.0/2.1.0-plus.md @@ -0,0 +1,66 @@ +# Results + +## Test environment + +NGINX Plus: true + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- This is the first test result since the 2.0 architecture change since you cannot upgrade from 1.6 -> 2.0. With this test setup, there is downtime when upgrading NGF. +- There is a manual workaround for this release which involves uninstalling then re-installing NGF, which should lead to no downtime on the data plane. Hopefully our new readiness probe will fix this issue in the next release. 
+ +## Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 6000, 100.01, 21.19 +Duration [total, attack, wait] 1m27s, 59.992s, 26.893s +Latencies [min, mean, 50, 90, 95, 99, max] 334.577µs, 12.057s, 5.704s, 30.001s, 30.001s, 30.001s, 30.002s +Bytes In [total, mean] 294560, 49.09 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 30.68% +Status Codes [code:count] 0:4159 200:1841 +Error Set: +Get "http://cafe.example.com/coffee": read tcp 10.138.0.105:42937->10.138.0.16:80: read: connection reset by peer +Get "http://cafe.example.com/coffee": read tcp 10.138.0.105:46913->10.138.0.16:80: read: connection reset by peer +Get "http://cafe.example.com/coffee": read tcp 10.138.0.105:59115->10.138.0.16:80: read: connection reset by peer +Get "http://cafe.example.com/coffee": dial tcp 0.0.0.0:0->10.138.0.16:80: connect: connection refused +Get "http://cafe.example.com/coffee": context deadline exceeded (Client.Timeout exceeded while awaiting headers) +``` + +![http-plus.png](http-plus.png) + +## Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 6000, 100.01, 21.19 +Duration [total, attack, wait] 1m27s, 59.994s, 26.888s +Latencies [min, mean, 50, 90, 95, 99, max] 361.283µs, 12.049s, 5.223s, 30.001s, 30.001s, 30.001s, 30.005s +Bytes In [total, mean] 284125, 47.35 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 30.68% +Status Codes [code:count] 0:4159 200:1841 +Error Set: +Get "https://cafe.example.com/tea": read tcp 10.138.0.105:36439->10.138.0.16:443: read: connection reset by peer +Get "https://cafe.example.com/tea": read tcp 10.138.0.105:52115->10.138.0.16:443: read: connection reset by peer +Get "https://cafe.example.com/tea": read tcp 10.138.0.105:37505->10.138.0.16:443: read: connection reset by peer +Get "https://cafe.example.com/tea": dial tcp 0.0.0.0:0->10.138.0.16:443: connect: connection refused +Get "https://cafe.example.com/tea": context deadline exceeded (Client.Timeout exceeded while awaiting headers) +``` + +![https-plus.png](https-plus.png) diff --git a/tests/results/ngf-upgrade/2.1.0/http-oss.png b/tests/results/ngf-upgrade/2.1.0/http-oss.png new file mode 100644 index 0000000000..2b637cabd0 Binary files /dev/null and b/tests/results/ngf-upgrade/2.1.0/http-oss.png differ diff --git a/tests/results/ngf-upgrade/2.1.0/http-plus.png b/tests/results/ngf-upgrade/2.1.0/http-plus.png new file mode 100644 index 0000000000..ee0077852f Binary files /dev/null and b/tests/results/ngf-upgrade/2.1.0/http-plus.png differ diff --git a/tests/results/ngf-upgrade/2.1.0/https-oss.png b/tests/results/ngf-upgrade/2.1.0/https-oss.png new file mode 100644 index 0000000000..ef6412887b Binary files /dev/null and b/tests/results/ngf-upgrade/2.1.0/https-oss.png differ diff --git a/tests/results/ngf-upgrade/2.1.0/https-plus.png b/tests/results/ngf-upgrade/2.1.0/https-plus.png new file mode 100644 index 0000000000..7b5d29c645 Binary files /dev/null and b/tests/results/ngf-upgrade/2.1.0/https-plus.png differ diff --git a/tests/results/reconfig/2.1.0/2.1.0-oss.md b/tests/results/reconfig/2.1.0/2.1.0-oss.md new file mode 100644 index 0000000000..cb7b14eab1 --- /dev/null +++ b/tests/results/reconfig/2.1.0/2.1.0-oss.md @@ -0,0 +1,153 @@ +# Results + +## Test environment + +NGINX Plus: false + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- 
Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Time to ready increased. Increase is more exacerbated when NumResources is 150. +- Jumbled configuration issues still exist as seen in 2.0 results. + +## Test 1: Resources exist before startup - NumResources 30 + +### Time to Ready + +Time To Ready Description: From when NGF starts to when the NGINX configuration is fully configured +- TimeToReadyTotal: 15s + +### Event Batch Processing + +- Event Batch Total: 10 +- Event Batch Processing Average Time: 3ms +- Event Batch Processing distribution: + - 500.0ms: 10 + - 1000.0ms: 10 + - 5000.0ms: 10 + - 10000.0ms: 10 + - 30000.0ms: 10 + - +Infms: 10 + +### NGINX Error Logs + +## Test 1: Resources exist before startup - NumResources 150 + +### Time to Ready + +Time To Ready Description: From when NGF starts to when the NGINX configuration is fully configured +- TimeToReadyTotal: 21s + +### Event Batch Processing + +- Event Batch Total: 9 +- Event Batch Processing Average Time: 8ms +- Event Batch Processing distribution: + - 500.0ms: 9 + - 1000.0ms: 9 + - 5000.0ms: 9 + - 10000.0ms: 9 + - 30000.0ms: 9 + - +Infms: 9 + +### NGINX Error Logs + +## Test 2: Start NGF, deploy Gateway, wait until NGINX agent instance connects to NGF, create many resources attached to GW - NumResources 30 + +### Time to Ready + +Time To Ready Description: From when NGINX receives the first configuration created by NGF to when the NGINX configuration is fully configured +- TimeToReadyTotal: 24s + +### Event Batch Processing + +- Event Batch Total: 309 +- Event Batch Processing Average Time: 11ms +- Event Batch Processing distribution: + - 500.0ms: 309 + - 1000.0ms: 309 + - 5000.0ms: 309 + - 10000.0ms: 309 + - 30000.0ms: 309 + - +Infms: 309 + +### NGINX Error Logs +2025/08/12 17:03:03 [emerg] 8#8: invalid number of arguments in "zone" directive in /etc/nginx/conf.d/http.conf:174 +2025/08/12 17:03:05 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:935 +2025/08/12 17:03:06 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:1822 +2025/08/12 17:03:07 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:2158 +2025/08/12 17:03:07 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:2396 +2025/08/12 17:03:09 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:3348 + +## Test 2: Start NGF, deploy Gateway, wait until NGINX agent instance connects to NGF, create many resources attached to GW - NumResources 150 + +### Time to Ready + +Time To Ready Description: From when NGINX receives the first configuration created by NGF to when the NGINX configuration is fully configured +- TimeToReadyTotal: 128s + +### Event Batch Processing + +- Event Batch Total: 1460 +- Event Batch Processing Average Time: 17ms +- Event Batch Processing distribution: + - 500.0ms: 1460 + - 1000.0ms: 1460 + - 5000.0ms: 1460 + - 10000.0ms: 1460 + - 30000.0ms: 1460 + - +Infms: 1460 + +### NGINX Error Logs +2025/08/12 17:07:27 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:461 +2025/08/12 17:07:28 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:1054 +2025/08/12 17:07:29 [emerg] 8#8: pread() returned only 0 bytes instead of 4095 in /etc/nginx/conf.d/http.conf:1397 +2025/08/12 17:07:32 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:3131 
+2025/08/12 17:07:32 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:3369 +2025/08/12 17:07:33 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:3790 +2025/08/12 17:07:34 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:4504 +2025/08/12 17:07:36 [emerg] 8#8: pread() returned only 0 bytes instead of 4095 in /etc/nginx/conf.d/http.conf:473 +2025/08/12 17:07:36 [emerg] 8#8: pread() returned only 0 bytes instead of 4086 in /etc/nginx/conf.d/http.conf:2080 +2025/08/12 17:07:37 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:5932 +2025/08/12 17:07:37 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:6051 +2025/08/12 17:07:37 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:6344 +2025/08/12 17:07:38 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:6701 +2025/08/12 17:07:39 [emerg] 8#8: pread() returned only 0 bytes instead of 4079 in /etc/nginx/conf.d/http.conf:2795 +2025/08/12 17:07:40 [emerg] 8#8: pread() returned only 0 bytes instead of 4095 in /etc/nginx/conf.d/http.conf:4649 +2025/08/12 17:07:41 [emerg] 8#8: duplicate upstream "namespace62_coffeenamespace62_80" in /etc/nginx/conf.d/http.conf:7876 +2025/08/12 17:07:42 [emerg] 8#8: pread() returned only 0 bytes instead of 4095 in /etc/nginx/conf.d/http.conf:7686 +2025/08/12 17:07:42 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:8745 +2025/08/12 17:07:42 [emerg] 8#8: pread() returned only 0 bytes instead of 4092 in /etc/nginx/conf.d/http.conf:2628 +2025/08/12 17:07:42 [emerg] 8#8: unexpected end of file, expecting "}" in /etc/nginx/conf.d/http.conf:9146 +2025/08/12 17:07:43 [emerg] 8#8: unexpected end of file, expecting "}" in /etc/nginx/conf.d/http.conf:9265 +2025/08/12 17:07:43 [emerg] 8#8: pread() returned only 0 bytes instead of 4093 in /etc/nginx/conf.d/http.conf:6150 +2025/08/12 17:07:43 [emerg] 8#8: pread() returned only 0 bytes instead of 4086 in /etc/nginx/conf.d/http.conf:7337 +2025/08/12 17:07:44 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:9860 +2025/08/12 17:07:44 [emerg] 8#8: pread() returned only 0 bytes instead of 4088 in /etc/nginx/conf.d/http.conf:9106 +2025/08/12 17:07:46 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:11276 +2025/08/12 17:07:48 [emerg] 8#8: pread() returned only 0 bytes instead of 4090 in /etc/nginx/conf.d/http.conf:2382 +2025/08/12 17:07:48 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:12868 +2025/08/12 17:07:49 [emerg] 8#8: pread() returned only 0 bytes instead of 4092 in /etc/nginx/conf.d/http.conf:2416 +2025/08/12 17:07:50 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:13701 +2025/08/12 17:07:50 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:13960 +2025/08/12 17:07:51 [emerg] 8#8: pread() returned only 0 bytes instead of 4095 in /etc/nginx/conf.d/http.conf:5827 +2025/08/12 17:07:51 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:14620 +2025/08/12 17:07:52 [emerg] 8#8: pread() returned only 0 bytes instead of 4095 in /etc/nginx/conf.d/http.conf:8849 +2025/08/12 17:07:53 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in 
/etc/nginx/conf.d/http.conf:15560 +2025/08/12 17:07:53 [emerg] 8#8: pread() returned only 0 bytes instead of 4046 in /etc/nginx/conf.d/http.conf:1758 +2025/08/12 17:07:56 [emerg] 8#8: unexpected end of file, expecting ";" or "}" in /etc/nginx/conf.d/http.conf:16676 diff --git a/tests/results/reconfig/2.1.0/2.1.0-plus.md b/tests/results/reconfig/2.1.0/2.1.0-plus.md new file mode 100644 index 0000000000..abb06bf55f --- /dev/null +++ b/tests/results/reconfig/2.1.0/2.1.0-plus.md @@ -0,0 +1,109 @@ +# Results + +## Test environment + +NGINX Plus: true + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Time to ready increased. Increase is more exacerbated when NumResources is 150. + +## Test 1: Resources exist before startup - NumResources 30 + +### Time to Ready + +Time To Ready Description: From when NGF starts to when the NGINX configuration is fully configured +- TimeToReadyTotal: 18s + +### Event Batch Processing + +- Event Batch Total: 9 +- Event Batch Processing Average Time: 19ms +- Event Batch Processing distribution: + - 500.0ms: 9 + - 1000.0ms: 9 + - 5000.0ms: 9 + - 10000.0ms: 9 + - 30000.0ms: 9 + - +Infms: 9 + +### NGINX Error Logs + +## Test 1: Resources exist before startup - NumResources 150 + +### Time to Ready + +Time To Ready Description: From when NGF starts to when the NGINX configuration is fully configured +- TimeToReadyTotal: 27s + +### Event Batch Processing + +- Event Batch Total: 8 +- Event Batch Processing Average Time: 21ms +- Event Batch Processing distribution: + - 500.0ms: 8 + - 1000.0ms: 8 + - 5000.0ms: 8 + - 10000.0ms: 8 + - 30000.0ms: 8 + - +Infms: 8 + +### NGINX Error Logs + +## Test 2: Start NGF, deploy Gateway, wait until NGINX agent instance connects to NGF, create many resources attached to GW - NumResources 30 + +### Time to Ready + +Time To Ready Description: From when NGINX receives the first configuration created by NGF to when the NGINX configuration is fully configured +- TimeToReadyTotal: 25s + +### Event Batch Processing + +- Event Batch Total: 222 +- Event Batch Processing Average Time: 36ms +- Event Batch Processing distribution: + - 500.0ms: 213 + - 1000.0ms: 222 + - 5000.0ms: 222 + - 10000.0ms: 222 + - 30000.0ms: 222 + - +Infms: 222 + +### NGINX Error Logs + +## Test 2: Start NGF, deploy Gateway, wait until NGINX agent instance connects to NGF, create many resources attached to GW - NumResources 150 + +### Time to Ready + +Time To Ready Description: From when NGINX receives the first configuration created by NGF to when the NGINX configuration is fully configured +- TimeToReadyTotal: 128s + +### Event Batch Processing + +- Event Batch Total: 1080 +- Event Batch Processing Average Time: 32ms +- Event Batch Processing distribution: + - 500.0ms: 1053 + - 1000.0ms: 1066 + - 5000.0ms: 1080 + - 10000.0ms: 1080 + - 30000.0ms: 1080 + - +Infms: 1080 + +### NGINX Error Logs diff --git a/tests/results/scale/2.1.0/2.1.0-oss.md b/tests/results/scale/2.1.0/2.1.0-oss.md new file mode 100644 index 0000000000..18e3ee776e --- /dev/null +++ b/tests/results/scale/2.1.0/2.1.0-oss.md @@ -0,0 +1,158 @@ +# Results + +## Test environment + +NGINX Plus: false + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false 
+ +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Memory, TTR, and Latency all stayed relatively consistent with 2.0 results. +- Tests which previously errored saw number of errors increase. +- Had a single new error in scale HTTPSListeners + +## Test TestScale_Listeners + +### Event Batch Processing + +- Total: 326 +- Average Time: 6ms +- Event Batch Processing distribution: + - 500.0ms: 326 + - 1000.0ms: 326 + - 5000.0ms: 326 + - 10000.0ms: 326 + - 30000.0ms: 326 + - +Infms: 326 + +### Errors + +- NGF errors: 22 +- NGF container restarts: 0 +- NGINX errors: 0 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_Listeners) for more details. +The logs are attached only if there are errors. + +## Test TestScale_HTTPSListeners + +### Event Batch Processing + +- Total: 389 +- Average Time: 18ms +- Event Batch Processing distribution: + - 500.0ms: 389 + - 1000.0ms: 389 + - 5000.0ms: 389 + - 10000.0ms: 389 + - 30000.0ms: 389 + - +Infms: 389 + +### Errors + +- NGF errors: 29 +- NGF container restarts: 0 +- NGINX errors: 0 +- NGINX container restarts: 0 + +Had a new error which hadn't been seen in the 2.0 results: +{"level":"error","ts":"2025-08-12T18:20:59Z","logger":"statusUpdater","msg":"Failed to update status","namespace":"scale","name":"gateway","kind":"","error":"timed out waiting for the condition","stacktrace":"github.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*Updater).writeStatuses\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/updater.go:112\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*Updater).Update\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/updater.go:83\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*LeaderAwareGroupUpdater).UpdateGroup\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/leader_aware_group_updater.go:54\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller.(*eventHandlerImpl).updateStatuses\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/handler.go:373\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller.(*eventHandlerImpl).waitForStatusUpdates\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/handler.go:273"} + + +### Graphs and Logs + +See [output directory](./TestScale_HTTPSListeners) for more details. +The logs are attached only if there are errors. + +## Test TestScale_HTTPRoutes + +### Event Batch Processing + +- Total: 1009 +- Average Time: 90ms +- Event Batch Processing distribution: + - 500.0ms: 1009 + - 1000.0ms: 1009 + - 5000.0ms: 1009 + - 10000.0ms: 1009 + - 30000.0ms: 1009 + - +Infms: 1009 + +### Errors + +- NGF errors: 0 +- NGF container restarts: 0 +- NGINX errors: 0 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_HTTPRoutes) for more details. +The logs are attached only if there are errors. 
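+Note: the status update failure reported in the TestScale_HTTPSListeners section above, and the repeated `Operation cannot be fulfilled ... the object has been modified` entries in the attached NGF logs, are Kubernetes optimistic-concurrency conflicts: the Gateway's resourceVersion changed between the controller's read and its status write. The sketch below is purely illustrative and is not NGF's actual updater code; it assumes a controller-runtime `client.Client` and a hypothetical `mutateStatus` callback, and shows the standard re-read-and-retry pattern (`retry.RetryOnConflict`) that controllers commonly use to absorb such conflicts.
+
+```go
+package status
+
+import (
+	"context"
+
+	"k8s.io/client-go/util/retry"
+	"sigs.k8s.io/controller-runtime/pkg/client"
+	gatewayv1 "sigs.k8s.io/gateway-api/apis/v1"
+)
+
+// updateGatewayStatus is an illustrative sketch (not NGF code): it re-reads
+// the Gateway and retries the status write whenever the API server reports a
+// resourceVersion conflict, instead of giving up on the first
+// "object has been modified" error.
+func updateGatewayStatus(
+	ctx context.Context,
+	c client.Client,
+	key client.ObjectKey, // e.g. namespace "scale", name "gateway"
+	mutateStatus func(*gatewayv1.Gateway), // hypothetical callback that sets status conditions
+) error {
+	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
+		var gw gatewayv1.Gateway
+		// Fetch the latest version so the update carries a current resourceVersion.
+		if err := c.Get(ctx, key, &gw); err != nil {
+			return err
+		}
+		mutateStatus(&gw)
+		// On a conflict this returns an error RetryOnConflict recognizes,
+		// and the closure runs again with a fresh read.
+		return c.Status().Update(ctx, &gw)
+	})
+}
+```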
+ +## Test TestScale_UpstreamServers + +### Event Batch Processing + +- Total: 111 +- Average Time: 68ms +- Event Batch Processing distribution: + - 500.0ms: 111 + - 1000.0ms: 111 + - 5000.0ms: 111 + - 10000.0ms: 111 + - 30000.0ms: 111 + - +Infms: 111 + +### Errors + +- NGF errors: 4 +- NGF container restarts: 0 +- NGINX errors: 0 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_UpstreamServers) for more details. +The logs are attached only if there are errors. + +## Test TestScale_HTTPMatches + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 907.868µs +Latencies [min, mean, 50, 90, 95, 99, max] 727.76µs, 947.864µs, 923.24µs, 1.032ms, 1.076ms, 1.273ms, 22.752ms +Bytes In [total, mean] 4830000, 161.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.00 +Duration [total, attack, wait] 30s, 29.999s, 1.035ms +Latencies [min, mean, 50, 90, 95, 99, max] 835.347µs, 1.052ms, 1.027ms, 1.154ms, 1.22ms, 1.389ms, 23.664ms +Bytes In [total, mean] 4830000, 161.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` diff --git a/tests/results/scale/2.1.0/2.1.0-plus.md b/tests/results/scale/2.1.0/2.1.0-plus.md new file mode 100644 index 0000000000..da3d0f23e9 --- /dev/null +++ b/tests/results/scale/2.1.0/2.1.0-plus.md @@ -0,0 +1,153 @@ +# Results + +## Test environment + +NGINX Plus: true + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Memory, TTR, and Latency all stayed relatively consistent with 2.0 results. +- Number of errors stayed consistent with 2.0 results. + +## Test TestScale_Listeners + +### Event Batch Processing + +- Total: 329 +- Average Time: 122ms +- Event Batch Processing distribution: + - 500.0ms: 288 + - 1000.0ms: 329 + - 5000.0ms: 329 + - 10000.0ms: 329 + - 30000.0ms: 329 + - +Infms: 329 + +### Errors + +- NGF errors: 11 +- NGF container restarts: 0 +- NGINX errors: 172 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_Listeners) for more details. +The logs are attached only if there are errors. + +## Test TestScale_HTTPSListeners + +### Event Batch Processing + +- Total: 390 +- Average Time: 119ms +- Event Batch Processing distribution: + - 500.0ms: 344 + - 1000.0ms: 390 + - 5000.0ms: 390 + - 10000.0ms: 390 + - 30000.0ms: 390 + - +Infms: 390 + +### Errors + +- NGF errors: 7 +- NGF container restarts: 0 +- NGINX errors: 161 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_HTTPSListeners) for more details. +The logs are attached only if there are errors. + +## Test TestScale_HTTPRoutes + +### Event Batch Processing + +- Total: 1009 +- Average Time: 170ms +- Event Batch Processing distribution: + - 500.0ms: 1002 + - 1000.0ms: 1009 + - 5000.0ms: 1009 + - 10000.0ms: 1009 + - 30000.0ms: 1009 + - +Infms: 1009 + +### Errors + +- NGF errors: 0 +- NGF container restarts: 0 +- NGINX errors: 0 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_HTTPRoutes) for more details. 
+The logs are attached only if there are errors. + +## Test TestScale_UpstreamServers + +### Event Batch Processing + +- Total: 55 +- Average Time: 390ms +- Event Batch Processing distribution: + - 500.0ms: 34 + - 1000.0ms: 54 + - 5000.0ms: 55 + - 10000.0ms: 55 + - 30000.0ms: 55 + - +Infms: 55 + +### Errors + +- NGF errors: 3 +- NGF container restarts: 0 +- NGINX errors: 0 +- NGINX container restarts: 0 + +### Graphs and Logs + +See [output directory](./TestScale_UpstreamServers) for more details. +The logs are attached only if there are errors. + +## Test TestScale_HTTPMatches + +```text +Requests [total, rate, throughput] 30000, 1000.04, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 669.782µs +Latencies [min, mean, 50, 90, 95, 99, max] 537.887µs, 711.738µs, 677.087µs, 798.22µs, 844.473µs, 1.059ms, 32.568ms +Bytes In [total, mean] 4800000, 160.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` +```text +Requests [total, rate, throughput] 30000, 1000.03, 1000.01 +Duration [total, attack, wait] 30s, 29.999s, 831.129µs +Latencies [min, mean, 50, 90, 95, 99, max] 650.119µs, 841.329µs, 798.543µs, 986.788µs, 1.046ms, 1.226ms, 24.473ms +Bytes In [total, mean] 4800000, 160.00 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` diff --git a/tests/results/scale/2.1.0/TestScale_HTTPRoutes/cpu-oss.png b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/cpu-oss.png new file mode 100644 index 0000000000..f7b8517059 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/cpu-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPRoutes/cpu-plus.png b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/cpu-plus.png new file mode 100644 index 0000000000..65572a8e3f Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/cpu-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPRoutes/memory-oss.png b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/memory-oss.png new file mode 100644 index 0000000000..1ea868fc7e Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/memory-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPRoutes/memory-plus.png b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/memory-plus.png new file mode 100644 index 0000000000..f8f00e1552 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/memory-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPRoutes/ttr-oss.png b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/ttr-oss.png new file mode 100644 index 0000000000..115d775512 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/ttr-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPRoutes/ttr-plus.png b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/ttr-plus.png new file mode 100644 index 0000000000..c3ffa9dbab Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPRoutes/ttr-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/cpu-oss.png b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/cpu-oss.png new file mode 100644 index 0000000000..52b299a199 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/cpu-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/cpu-plus.png b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/cpu-plus.png new file mode 100644 index 0000000000..4fab35b26a 
Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/cpu-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/memory-oss.png b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/memory-oss.png new file mode 100644 index 0000000000..9e70a7cb2d Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/memory-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/memory-plus.png b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/memory-plus.png new file mode 100644 index 0000000000..b8685735c9 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/memory-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ngf-oss.log b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ngf-oss.log new file mode 100644 index 0000000000..4a80a3af16 --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ngf-oss.log @@ -0,0 +1,29 @@ +{"level":"debug","ts":"2025-08-12T18:18:41Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gatewayclasses.gateway.networking.k8s.io \"nginx\": the object has been modified; please apply your changes to the latest version and try again","namespace":"","name":"nginx","kind":"GatewayClass"} +{"level":"debug","ts":"2025-08-12T18:19:24Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:19Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:20Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:21Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:22Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:24Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:27Z","logger":"statusUpdater","msg":"Encountered error 
updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:28Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:31Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:32Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:33Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:36Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:42Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:42Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:44Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:46Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:47Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot 
be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:48Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:49Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:51Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:56Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:56Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:57Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:20:59Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"error","ts":"2025-08-12T18:20:59Z","logger":"statusUpdater","msg":"Failed to update status","namespace":"scale","name":"gateway","kind":"","error":"timed out waiting for the 
condition","stacktrace":"github.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*Updater).writeStatuses\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/updater.go:112\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*Updater).Update\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/updater.go:83\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*LeaderAwareGroupUpdater).UpdateGroup\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/leader_aware_group_updater.go:54\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller.(*eventHandlerImpl).updateStatuses\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/handler.go:373\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller.(*eventHandlerImpl).waitForStatusUpdates\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/handler.go:273"} +{"level":"debug","ts":"2025-08-12T18:21:00Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:21:01Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:21:03Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ngf-plus.log b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ngf-plus.log new file mode 100644 index 0000000000..fdc6cb2fec --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ngf-plus.log @@ -0,0 +1,7 @@ +{"level":"debug","ts":"2025-08-12T18:26:45Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:27:46Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:27:48Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:27:52Z","logger":"statusUpdater","msg":"Encountered error updating 
status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:27:54Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:28:00Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:28:01Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/nginx-plus.log b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/nginx-plus.log new file mode 100644 index 0000000000..6ea1e5ea2e --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/nginx-plus.log @@ -0,0 +1,161 @@ +2025/08/12 18:27:35 [error] 148#148: *46 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:27:35 [error] 149#149: *47 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:27:35 [error] 151#151: *48 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:27:36 [error] 152#152: *49 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:27:36 [error] 184#184: *63 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:27:36 [error] 185#185: *64 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:27:36 [error] 187#187: *65 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:27:36 [error] 188#188: *66 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:27:37 [error] 220#220: *83 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: 
"GET / HTTP/2.0", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:27:37 [error] 221#221: *84 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:27:37 [error] 223#223: *85 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:27:37 [error] 224#224: *86 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:27:38 [error] 257#257: *102 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:27:38 [error] 258#258: *103 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:27:38 [error] 260#260: *104 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:27:38 [error] 261#261: *106 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:27:39 [error] 293#293: *123 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:27:39 [error] 294#294: *124 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:27:39 [error] 296#296: *125 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:27:39 [error] 297#297: *126 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:27:40 [error] 329#329: *144 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:27:40 [error] 330#330: *145 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:27:40 [error] 332#332: *146 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:27:40 [error] 333#333: *147 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:27:41 [error] 365#365: *166 no live upstreams while 
connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:27:41 [error] 366#366: *167 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:27:41 [error] 368#368: *168 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:27:41 [error] 369#369: *169 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:27:42 [error] 402#402: *189 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:27:42 [error] 403#403: *190 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:27:42 [error] 405#405: *192 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:27:42 [error] 406#406: *193 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:27:42 [error] 407#407: *194 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:27:43 [error] 439#439: *215 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:27:43 [error] 440#440: *216 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:27:43 [error] 442#442: *217 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:27:43 [error] 443#443: *218 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:27:45 [error] 475#475: *234 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-10_80/", host: "10.example.com" +2025/08/12 18:27:45 [error] 476#476: *235 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-10_80/", host: "10.example.com" +2025/08/12 18:27:45 [error] 478#478: *236 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-10_80/", 
host: "10.example.com" +2025/08/12 18:27:45 [error] 511#511: *259 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:27:45 [error] 512#512: *260 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:27:46 [error] 514#514: *261 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:27:46 [error] 515#515: *262 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:27:46 [error] 547#547: *286 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:27:46 [error] 548#548: *287 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:27:46 [error] 550#550: *288 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:27:47 [error] 551#551: *289 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:27:47 [error] 584#584: *315 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:27:47 [error] 585#585: *316 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:27:47 [error] 587#587: *317 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:27:48 [error] 588#588: *318 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:27:48 [error] 620#620: *345 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:27:48 [error] 621#621: *346 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:27:49 [error] 623#623: *347 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:27:49 [error] 624#624: *348 no live upstreams while connecting to 
upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:27:49 [error] 656#656: *375 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:27:49 [error] 657#657: *376 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:27:49 [error] 659#659: *377 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:27:50 [error] 660#660: *378 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:27:50 [error] 692#692: *406 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:27:51 [error] 693#693: *407 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:27:51 [error] 695#695: *408 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:27:51 [error] 696#696: *409 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:27:52 [error] 728#728: *438 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:27:52 [error] 729#729: *439 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:27:52 [error] 731#731: *441 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:27:52 [error] 732#732: *442 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:27:53 [error] 765#765: *472 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:27:53 [error] 766#766: *473 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:27:53 [error] 768#768: *474 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/2.0", upstream: 
"http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:27:53 [error] 769#769: *475 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:27:54 [error] 801#801: *506 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:27:54 [error] 802#802: *507 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:27:54 [error] 804#804: *508 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:27:54 [error] 805#805: *509 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:27:55 [error] 837#837: *542 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:27:55 [error] 838#838: *543 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:27:55 [error] 840#840: *544 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:27:55 [error] 841#841: *545 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:27:55 [error] 842#842: *546 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:27:56 [error] 873#873: *579 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 21.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-21_80/", host: "21.example.com" +2025/08/12 18:27:56 [error] 874#874: *580 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 21.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-21_80/", host: "21.example.com" +2025/08/12 18:27:56 [error] 876#876: *581 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 21.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-21_80/", host: "21.example.com" +2025/08/12 18:27:56 [error] 877#877: *582 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 21.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-21_80/", host: "21.example.com" +2025/08/12 18:27:56 [error] 878#878: *583 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 21.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-21_80/", host: "21.example.com" +2025/08/12 18:27:57 [error] 910#910: *618 no live 
upstreams while connecting to upstream, client: 34.168.158.112, server: 22.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-22_80/", host: "22.example.com" +2025/08/12 18:27:57 [error] 911#911: *619 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 22.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-22_80/", host: "22.example.com" +2025/08/12 18:27:57 [error] 913#913: *620 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 22.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-22_80/", host: "22.example.com" +2025/08/12 18:27:58 [error] 914#914: *621 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 22.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-22_80/", host: "22.example.com" +2025/08/12 18:27:58 [error] 915#915: *622 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 22.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-22_80/", host: "22.example.com" +2025/08/12 18:27:58 [error] 946#946: *659 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 23.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-23_80/", host: "23.example.com" +2025/08/12 18:27:58 [error] 947#947: *660 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 23.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-23_80/", host: "23.example.com" +2025/08/12 18:27:59 [error] 949#949: *661 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 23.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-23_80/", host: "23.example.com" +2025/08/12 18:27:59 [error] 950#950: *662 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 23.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-23_80/", host: "23.example.com" +2025/08/12 18:27:59 [error] 951#951: *663 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 23.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-23_80/", host: "23.example.com" +2025/08/12 18:28:03 [error] 1092#1092: *823 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 27.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-27_80/", host: "27.example.com" +2025/08/12 18:28:06 [error] 1200#1200: *940 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 30.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-30_80/", host: "30.example.com" +2025/08/12 18:28:08 [error] 1273#1273: *1033 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 32.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-32_80/", host: "32.example.com" +2025/08/12 18:28:10 [error] 1345#1345: *1122 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 34.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-34_80/", host: "34.example.com" +2025/08/12 18:28:11 [error] 1381#1381: *1167 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 35.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-35_80/", host: "35.example.com" +2025/08/12 18:28:13 [error] 1418#1418: *1214 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 36.example.com, 
request: "GET / HTTP/2.0", upstream: "http://scale_backend-36_80/", host: "36.example.com" +2025/08/12 18:28:14 [error] 1454#1454: *1266 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 37.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-37_80/", host: "37.example.com" +2025/08/12 18:28:15 [error] 1490#1490: *1306 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 38.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-38_80/", host: "38.example.com" +2025/08/12 18:28:16 [error] 1491#1491: *1316 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 38.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-38_80/", host: "38.example.com" +2025/08/12 18:28:17 [error] 1526#1526: *1362 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 39.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-39_80/", host: "39.example.com" +2025/08/12 18:28:18 [error] 1562#1562: *1415 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 40.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-40_80/", host: "40.example.com" +2025/08/12 18:28:19 [error] 1598#1598: *1461 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 41.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-41_80/", host: "41.example.com" +2025/08/12 18:28:19 [error] 1599#1599: *1474 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 41.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-41_80/", host: "41.example.com" +2025/08/12 18:28:20 [error] 1634#1634: *1515 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 42.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-42_80/", host: "42.example.com" +2025/08/12 18:28:21 [error] 1635#1635: *1530 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 42.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-42_80/", host: "42.example.com" +2025/08/12 18:28:22 [error] 1670#1670: *1577 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 43.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-43_80/", host: "43.example.com" +2025/08/12 18:28:24 [error] 1706#1706: *1626 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 44.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-44_80/", host: "44.example.com" +2025/08/12 18:28:24 [error] 1707#1707: *1640 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 44.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-44_80/", host: "44.example.com" +2025/08/12 18:28:26 [error] 1742#1742: *1688 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 45.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-45_80/", host: "45.example.com" +2025/08/12 18:28:26 [error] 1743#1743: *1702 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 45.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-45_80/", host: "45.example.com" +2025/08/12 18:28:28 [error] 1779#1779: *1746 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 46.example.com, request: "GET / HTTP/2.0", upstream: 
"http://scale_backend-46_80/", host: "46.example.com" +2025/08/12 18:28:28 [error] 1780#1780: *1761 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 46.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-46_80/", host: "46.example.com" +2025/08/12 18:28:30 [error] 1815#1815: *1812 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 47.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-47_80/", host: "47.example.com" +2025/08/12 18:28:32 [error] 1851#1851: *1865 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 48.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-48_80/", host: "48.example.com" +2025/08/12 18:28:32 [error] 1852#1852: *1880 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 48.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-48_80/", host: "48.example.com" +2025/08/12 18:28:34 [error] 1888#1888: *1929 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 49.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-49_80/", host: "49.example.com" +2025/08/12 18:28:34 [error] 1889#1889: *1944 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 49.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-49_80/", host: "49.example.com" +2025/08/12 18:28:35 [error] 1924#1924: *1992 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 50.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-50_80/", host: "50.example.com" +2025/08/12 18:28:35 [error] 1925#1925: *2007 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 50.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-50_80/", host: "50.example.com" +2025/08/12 18:28:37 [error] 1960#1960: *2052 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 51.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-51_80/", host: "51.example.com" +2025/08/12 18:28:37 [error] 1961#1961: *2066 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 51.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-51_80/", host: "51.example.com" +2025/08/12 18:28:37 [error] 1963#1963: *2075 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 51.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-51_80/", host: "51.example.com" +2025/08/12 18:28:39 [error] 1996#1996: *2120 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 52.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-52_80/", host: "52.example.com" +2025/08/12 18:28:39 [error] 1997#1997: *2136 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 52.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-52_80/", host: "52.example.com" +2025/08/12 18:28:41 [error] 2032#2032: *2188 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 53.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-53_80/", host: "53.example.com" +2025/08/12 18:28:41 [error] 2033#2033: *2203 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 53.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-53_80/", host: "53.example.com" 
+2025/08/12 18:28:43 [error] 2069#2069: *2251 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 54.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-54_80/", host: "54.example.com" +2025/08/12 18:28:43 [error] 2070#2070: *2266 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 54.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-54_80/", host: "54.example.com" +2025/08/12 18:28:45 [error] 2105#2105: *2317 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 55.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-55_80/", host: "55.example.com" +2025/08/12 18:28:46 [error] 2106#2106: *2332 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 55.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-55_80/", host: "55.example.com" +2025/08/12 18:28:46 [error] 2108#2108: *2341 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 55.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-55_80/", host: "55.example.com" +2025/08/12 18:28:48 [error] 2142#2142: *2383 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 56.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-56_80/", host: "56.example.com" +2025/08/12 18:28:48 [error] 2143#2143: *2398 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 56.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-56_80/", host: "56.example.com" +2025/08/12 18:28:48 [error] 2145#2145: *2411 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 56.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-56_80/", host: "56.example.com" +2025/08/12 18:28:50 [error] 2178#2178: *2456 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 57.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-57_80/", host: "57.example.com" +2025/08/12 18:28:50 [error] 2179#2179: *2472 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 57.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-57_80/", host: "57.example.com" +2025/08/12 18:28:50 [error] 2181#2181: *2484 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 57.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-57_80/", host: "57.example.com" +2025/08/12 18:28:51 [error] 2214#2214: *2531 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 58.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-58_80/", host: "58.example.com" +2025/08/12 18:28:51 [error] 2215#2215: *2547 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 58.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-58_80/", host: "58.example.com" +2025/08/12 18:28:51 [error] 2217#2217: *2562 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 58.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-58_80/", host: "58.example.com" +2025/08/12 18:28:54 [error] 2251#2251: *2597 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 59.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-59_80/", host: "59.example.com" +2025/08/12 18:28:54 [error] 2252#2252: *2612 no live 
upstreams while connecting to upstream, client: 34.168.158.112, server: 59.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-59_80/", host: "59.example.com" +2025/08/12 18:28:54 [error] 2254#2254: *2627 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 59.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-59_80/", host: "59.example.com" +2025/08/12 18:28:55 [error] 2287#2287: *2672 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 60.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-60_80/", host: "60.example.com" +2025/08/12 18:28:55 [error] 2288#2288: *2688 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 60.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-60_80/", host: "60.example.com" +2025/08/12 18:28:55 [error] 2290#2290: *2704 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 60.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-60_80/", host: "60.example.com" +2025/08/12 18:28:57 [error] 2323#2323: *2754 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:28:58 [error] 2324#2324: *2766 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:28:58 [error] 2326#2326: *2781 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:29:00 [error] 2360#2360: *2837 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:29:01 [error] 2361#2361: *2852 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:29:01 [error] 2363#2363: *2867 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:29:03 [error] 2397#2397: *2917 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-63_80/", host: "63.example.com" +2025/08/12 18:29:03 [error] 2398#2398: *2932 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-63_80/", host: "63.example.com" +2025/08/12 18:29:03 [error] 2400#2400: *2948 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/2.0", upstream: "http://scale_backend-63_80/", host: "63.example.com" diff --git a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ttr-oss.png b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ttr-oss.png new file mode 100644 index 0000000000..694536066c Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ttr-oss.png differ diff --git 
a/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ttr-plus.png b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ttr-plus.png new file mode 100644 index 0000000000..afe5382f7f Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_HTTPSListeners/ttr-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/cpu-oss.png b/tests/results/scale/2.1.0/TestScale_Listeners/cpu-oss.png new file mode 100644 index 0000000000..5ca95e980a Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_Listeners/cpu-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/cpu-plus.png b/tests/results/scale/2.1.0/TestScale_Listeners/cpu-plus.png new file mode 100644 index 0000000000..35af944b17 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_Listeners/cpu-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/memory-oss.png b/tests/results/scale/2.1.0/TestScale_Listeners/memory-oss.png new file mode 100644 index 0000000000..9a21bd617a Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_Listeners/memory-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/memory-plus.png b/tests/results/scale/2.1.0/TestScale_Listeners/memory-plus.png new file mode 100644 index 0000000000..1b6698664e Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_Listeners/memory-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/ngf-oss.log b/tests/results/scale/2.1.0/TestScale_Listeners/ngf-oss.log new file mode 100644 index 0000000000..8504b5af91 --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_Listeners/ngf-oss.log @@ -0,0 +1,22 @@ +{"level":"debug","ts":"2025-08-12T18:15:08Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:15:57Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:15:58Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:03Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:04Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:04Z","logger":"statusUpdater","msg":"Encountered error updating 
status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:05Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:06Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"error","ts":"2025-08-12T18:16:06Z","logger":"statusUpdater","msg":"Failed to update status","namespace":"scale","name":"gateway","kind":"","error":"timed out waiting for the condition","stacktrace":"github.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*Updater).writeStatuses\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/updater.go:112\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*Updater).Update\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/updater.go:83\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller/status.(*LeaderAwareGroupUpdater).UpdateGroup\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/status/leader_aware_group_updater.go:54\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller.(*eventHandlerImpl).updateStatuses\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/handler.go:373\ngithub.com/nginx/nginx-gateway-fabric/v2/internal/controller.(*eventHandlerImpl).waitForStatusUpdates\n\t/home/runner/work/nginx-gateway-fabric/nginx-gateway-fabric/internal/controller/handler.go:273"} +{"level":"debug","ts":"2025-08-12T18:16:07Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:15Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:16Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:18Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try 
again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:18Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:19Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:21Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:32Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:33Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:34Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:39Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:41Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:16:43Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/ngf-plus.log b/tests/results/scale/2.1.0/TestScale_Listeners/ngf-plus.log new file mode 100644 index 0000000000..1cb1e2d00d --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_Listeners/ngf-plus.log @@ -0,0 +1,11 @@ 
+{"level":"debug","ts":"2025-08-12T18:23:14Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:16Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:19Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:22Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:28Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:28Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:29Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:39Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:40Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} +{"level":"debug","ts":"2025-08-12T18:23:40Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} 
+{"level":"debug","ts":"2025-08-12T18:23:41Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/nginx-plus.log b/tests/results/scale/2.1.0/TestScale_Listeners/nginx-plus.log new file mode 100644 index 0000000000..a94dd0d21c --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_Listeners/nginx-plus.log @@ -0,0 +1,172 @@ +2025/08/12 18:23:05 [error] 125#125: *47 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:23:06 [error] 126#126: *48 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:23:06 [error] 128#128: *49 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:23:06 [error] 129#129: *50 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 1.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-1_80/", host: "1.example.com" +2025/08/12 18:23:06 [error] 144#144: *62 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:23:06 [error] 145#145: *63 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:23:07 [error] 147#147: *64 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:23:07 [error] 148#148: *65 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 2.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-2_80/", host: "2.example.com" +2025/08/12 18:23:07 [error] 162#162: *79 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:23:07 [error] 163#163: *80 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:23:07 [error] 165#165: *81 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:23:08 [error] 166#166: *82 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 3.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-3_80/", host: "3.example.com" +2025/08/12 18:23:08 [error] 181#181: *96 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-4_80/", 
host: "4.example.com" +2025/08/12 18:23:08 [error] 180#180: *97 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:23:08 [error] 183#183: *98 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:23:08 [error] 184#184: *99 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 4.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-4_80/", host: "4.example.com" +2025/08/12 18:23:09 [error] 198#198: *114 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:23:09 [error] 199#199: *115 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:23:09 [error] 201#201: *116 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:23:09 [error] 202#202: *117 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 5.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-5_80/", host: "5.example.com" +2025/08/12 18:23:10 [error] 216#216: *133 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:23:10 [error] 217#217: *134 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:23:10 [error] 219#219: *135 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:23:10 [error] 220#220: *136 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 6.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-6_80/", host: "6.example.com" +2025/08/12 18:23:11 [error] 235#235: *154 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:23:11 [error] 236#236: *155 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:23:11 [error] 238#238: *156 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:23:11 [error] 239#239: *157 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 7.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:23:11 [error] 240#240: *158 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 
7.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-7_80/", host: "7.example.com" +2025/08/12 18:23:12 [error] 253#253: *176 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:23:12 [error] 254#254: *177 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:23:12 [error] 256#256: *178 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:23:12 [error] 257#257: *179 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:23:12 [error] 258#258: *180 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 8.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-8_80/", host: "8.example.com" +2025/08/12 18:23:13 [error] 271#271: *199 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:23:13 [error] 272#272: *200 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:23:13 [error] 274#274: *201 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:23:13 [error] 275#275: *202 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:23:13 [error] 276#276: *203 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 9.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-9_80/", host: "9.example.com" +2025/08/12 18:23:14 [error] 289#289: *214 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-10_80/", host: "10.example.com" +2025/08/12 18:23:14 [error] 290#290: *215 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-10_80/", host: "10.example.com" +2025/08/12 18:23:14 [error] 292#292: *216 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-10_80/", host: "10.example.com" +2025/08/12 18:23:14 [error] 293#293: *217 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 10.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-10_80/", host: "10.example.com" +2025/08/12 18:23:15 [error] 307#307: *238 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:23:15 [error] 
308#308: *239 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:23:15 [error] 310#310: *240 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:23:15 [error] 311#311: *241 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 11.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-11_80/", host: "11.example.com" +2025/08/12 18:23:16 [error] 326#326: *264 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:23:16 [error] 327#327: *265 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:23:16 [error] 329#329: *266 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:23:16 [error] 330#330: *267 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 12.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-12_80/", host: "12.example.com" +2025/08/12 18:23:17 [error] 344#344: *290 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:23:17 [error] 345#345: *291 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:23:17 [error] 347#347: *293 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:23:17 [error] 348#348: *294 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 13.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-13_80/", host: "13.example.com" +2025/08/12 18:23:17 [error] 362#362: *318 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:23:18 [error] 363#363: *319 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:23:18 [error] 365#365: *320 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:23:18 [error] 366#366: *321 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 14.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:23:18 [error] 367#367: *322 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 
14.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-14_80/", host: "14.example.com" +2025/08/12 18:23:19 [error] 380#380: *347 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:23:19 [error] 381#381: *348 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:23:19 [error] 383#383: *349 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:23:19 [error] 384#384: *350 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 15.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-15_80/", host: "15.example.com" +2025/08/12 18:23:19 [error] 398#398: *376 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:23:20 [error] 399#399: *377 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:23:20 [error] 401#401: *378 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:23:20 [error] 402#402: *379 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 16.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-16_80/", host: "16.example.com" +2025/08/12 18:23:20 [error] 416#416: *407 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:23:21 [error] 417#417: *408 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:23:21 [error] 419#419: *409 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:23:21 [error] 420#420: *410 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:23:21 [error] 421#421: *411 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 17.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-17_80/", host: "17.example.com" +2025/08/12 18:23:22 [error] 435#435: *439 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:23:22 [error] 436#436: *440 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-18_80/", host: 
"18.example.com" +2025/08/12 18:23:22 [error] 438#438: *441 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:23:22 [error] 439#439: *442 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 18.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-18_80/", host: "18.example.com" +2025/08/12 18:23:23 [error] 453#453: *471 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:23:23 [error] 454#454: *472 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:23:23 [error] 456#456: *473 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:23:23 [error] 457#457: *474 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:23:23 [error] 458#458: *475 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 19.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-19_80/", host: "19.example.com" +2025/08/12 18:23:24 [error] 471#471: *506 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:23:24 [error] 472#472: *507 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:23:24 [error] 474#474: *508 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:23:24 [error] 475#475: *509 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:23:24 [error] 476#476: *510 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 20.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-20_80/", host: "20.example.com" +2025/08/12 18:23:26 [error] 508#508: *571 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 22.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-22_80/", host: "22.example.com" +2025/08/12 18:23:27 [error] 526#526: *603 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 23.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-23_80/", host: "23.example.com" +2025/08/12 18:23:32 [error] 635#635: *810 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 29.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-29_80/", host: "29.example.com" +2025/08/12 18:23:33 [error] 654#654: *851 no live upstreams while connecting to 
upstream, client: 34.168.158.112, server: 30.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-30_80/", host: "30.example.com" +2025/08/12 18:23:34 [error] 671#671: *891 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 31.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-31_80/", host: "31.example.com" +2025/08/12 18:23:35 [error] 689#689: *932 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 32.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-32_80/", host: "32.example.com" +2025/08/12 18:23:36 [error] 708#708: *967 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 33.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-33_80/", host: "33.example.com" +2025/08/12 18:23:36 [error] 709#709: *980 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 33.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-33_80/", host: "33.example.com" +2025/08/12 18:23:37 [error] 726#726: *1027 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 34.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-34_80/", host: "34.example.com" +2025/08/12 18:23:38 [error] 727#727: *1039 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 34.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-34_80/", host: "34.example.com" +2025/08/12 18:23:39 [error] 745#745: *1072 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 35.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-35_80/", host: "35.example.com" +2025/08/12 18:23:39 [error] 763#763: *1118 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 36.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-36_80/", host: "36.example.com" +2025/08/12 18:23:40 [error] 781#781: *1157 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 37.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-37_80/", host: "37.example.com" +2025/08/12 18:23:40 [error] 782#782: *1171 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 37.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-37_80/", host: "37.example.com" +2025/08/12 18:23:42 [error] 800#800: *1212 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 38.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-38_80/", host: "38.example.com" +2025/08/12 18:23:43 [error] 818#818: *1256 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 39.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-39_80/", host: "39.example.com" +2025/08/12 18:23:43 [error] 819#819: *1270 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 39.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-39_80/", host: "39.example.com" +2025/08/12 18:23:45 [error] 836#836: *1304 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 40.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-40_80/", host: "40.example.com" +2025/08/12 18:23:45 [error] 837#837: *1318 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 40.example.com, request: "GET / HTTP/1.1", 
upstream: "http://scale_backend-40_80/", host: "40.example.com" +2025/08/12 18:23:47 [error] 855#855: *1357 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 41.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-41_80/", host: "41.example.com" +2025/08/12 18:23:47 [error] 856#856: *1372 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 41.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-41_80/", host: "41.example.com" +2025/08/12 18:23:48 [error] 873#873: *1411 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 42.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-42_80/", host: "42.example.com" +2025/08/12 18:23:48 [error] 874#874: *1426 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 42.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-42_80/", host: "42.example.com" +2025/08/12 18:23:50 [error] 891#891: *1465 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 43.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-43_80/", host: "43.example.com" +2025/08/12 18:23:50 [error] 892#892: *1479 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 43.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-43_80/", host: "43.example.com" +2025/08/12 18:23:51 [error] 910#910: *1515 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 44.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-44_80/", host: "44.example.com" +2025/08/12 18:23:51 [error] 911#911: *1528 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 44.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-44_80/", host: "44.example.com" +2025/08/12 18:23:53 [error] 928#928: *1574 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 45.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-45_80/", host: "45.example.com" +2025/08/12 18:23:53 [error] 929#929: *1586 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 45.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-45_80/", host: "45.example.com" +2025/08/12 18:23:54 [error] 946#946: *1625 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 46.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-46_80/", host: "46.example.com" +2025/08/12 18:23:55 [error] 947#947: *1639 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 46.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-46_80/", host: "46.example.com" +2025/08/12 18:23:56 [error] 966#966: *1680 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 47.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-47_80/", host: "47.example.com" +2025/08/12 18:23:56 [error] 965#965: *1695 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 47.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-47_80/", host: "47.example.com" +2025/08/12 18:23:58 [error] 983#983: *1738 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 48.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-48_80/", host: "48.example.com" +2025/08/12 18:23:58 
[error] 984#984: *1751 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 48.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-48_80/", host: "48.example.com" +2025/08/12 18:23:58 [error] 986#986: *1764 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 48.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-48_80/", host: "48.example.com" +2025/08/12 18:24:00 [error] 1001#1001: *1798 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 49.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-49_80/", host: "49.example.com" +2025/08/12 18:24:00 [error] 1002#1002: *1812 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 49.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-49_80/", host: "49.example.com" +2025/08/12 18:24:00 [error] 1004#1004: *1824 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 49.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-49_80/", host: "49.example.com" +2025/08/12 18:24:02 [error] 1020#1020: *1861 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 50.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-50_80/", host: "50.example.com" +2025/08/12 18:24:02 [error] 1021#1021: *1875 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 50.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-50_80/", host: "50.example.com" +2025/08/12 18:24:02 [error] 1023#1023: *1889 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 50.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-50_80/", host: "50.example.com" +2025/08/12 18:24:04 [error] 1039#1039: *1921 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 51.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-51_80/", host: "51.example.com" +2025/08/12 18:24:04 [error] 1040#1040: *1934 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 51.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-51_80/", host: "51.example.com" +2025/08/12 18:24:04 [error] 1042#1042: *1946 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 51.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-51_80/", host: "51.example.com" +2025/08/12 18:24:05 [error] 1057#1057: *1991 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 52.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-52_80/", host: "52.example.com" +2025/08/12 18:24:05 [error] 1058#1058: *2003 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 52.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-52_80/", host: "52.example.com" +2025/08/12 18:24:06 [error] 1060#1060: *2014 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 52.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-52_80/", host: "52.example.com" +2025/08/12 18:24:07 [error] 1076#1076: *2048 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 53.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-53_80/", host: "53.example.com" +2025/08/12 18:24:07 [error] 1077#1077: *2060 no live upstreams while connecting to 
upstream, client: 34.168.158.112, server: 53.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-53_80/", host: "53.example.com" +2025/08/12 18:24:07 [error] 1079#1079: *2071 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 53.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-53_80/", host: "53.example.com" +2025/08/12 18:24:08 [error] 1080#1080: *2082 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 53.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-53_80/", host: "53.example.com" +2025/08/12 18:24:09 [error] 1094#1094: *2109 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 54.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-54_80/", host: "54.example.com" +2025/08/12 18:24:09 [error] 1095#1095: *2122 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 54.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-54_80/", host: "54.example.com" +2025/08/12 18:24:09 [error] 1097#1097: *2135 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 54.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-54_80/", host: "54.example.com" +2025/08/12 18:24:11 [error] 1113#1113: *2182 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 55.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-55_80/", host: "55.example.com" +2025/08/12 18:24:11 [error] 1114#1114: *2194 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 55.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-55_80/", host: "55.example.com" +2025/08/12 18:24:11 [error] 1116#1116: *2210 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 55.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-55_80/", host: "55.example.com" +2025/08/12 18:24:13 [error] 1131#1131: *2251 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 56.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-56_80/", host: "56.example.com" +2025/08/12 18:24:13 [error] 1132#1132: *2266 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 56.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-56_80/", host: "56.example.com" +2025/08/12 18:24:13 [error] 1134#1134: *2281 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 56.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-56_80/", host: "56.example.com" +2025/08/12 18:24:15 [error] 1149#1149: *2316 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 57.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-57_80/", host: "57.example.com" +2025/08/12 18:24:15 [error] 1150#1150: *2330 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 57.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-57_80/", host: "57.example.com" +2025/08/12 18:24:16 [error] 1152#1152: *2344 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 57.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-57_80/", host: "57.example.com" +2025/08/12 18:24:18 [error] 1168#1168: *2385 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 58.example.com, 
request: "GET / HTTP/1.1", upstream: "http://scale_backend-58_80/", host: "58.example.com" +2025/08/12 18:24:18 [error] 1169#1169: *2398 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 58.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-58_80/", host: "58.example.com" +2025/08/12 18:24:18 [error] 1171#1171: *2412 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 58.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-58_80/", host: "58.example.com" +2025/08/12 18:24:20 [error] 1186#1186: *2454 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 59.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-59_80/", host: "59.example.com" +2025/08/12 18:24:20 [error] 1187#1187: *2468 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 59.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-59_80/", host: "59.example.com" +2025/08/12 18:24:20 [error] 1189#1189: *2480 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 59.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-59_80/", host: "59.example.com" +2025/08/12 18:24:21 [error] 1205#1205: *2525 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 60.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-60_80/", host: "60.example.com" +2025/08/12 18:24:21 [error] 1206#1206: *2539 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 60.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-60_80/", host: "60.example.com" +2025/08/12 18:24:22 [error] 1208#1208: *2553 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 60.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-60_80/", host: "60.example.com" +2025/08/12 18:24:24 [error] 1223#1223: *2595 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:24:24 [error] 1224#1224: *2605 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:24:24 [error] 1226#1226: *2616 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:24:24 [error] 1227#1227: *2630 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 61.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-61_80/", host: "61.example.com" +2025/08/12 18:24:26 [error] 1242#1242: *2664 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:24:26 [error] 1243#1243: *2677 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:24:26 [error] 1245#1245: *2692 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/1.1", upstream: 
"http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:24:26 [error] 1246#1246: *2704 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 62.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-62_80/", host: "62.example.com" +2025/08/12 18:24:28 [error] 1260#1260: *2743 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-63_80/", host: "63.example.com" +2025/08/12 18:24:28 [error] 1261#1261: *2757 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-63_80/", host: "63.example.com" +2025/08/12 18:24:28 [error] 1263#1263: *2771 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-63_80/", host: "63.example.com" +2025/08/12 18:24:28 [error] 1264#1264: *2785 no live upstreams while connecting to upstream, client: 34.168.158.112, server: 63.example.com, request: "GET / HTTP/1.1", upstream: "http://scale_backend-63_80/", host: "63.example.com" diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/ttr-oss.png b/tests/results/scale/2.1.0/TestScale_Listeners/ttr-oss.png new file mode 100644 index 0000000000..9d6a6410fa Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_Listeners/ttr-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_Listeners/ttr-plus.png b/tests/results/scale/2.1.0/TestScale_Listeners/ttr-plus.png new file mode 100644 index 0000000000..1625556986 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_Listeners/ttr-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_UpstreamServers/cpu-oss.png b/tests/results/scale/2.1.0/TestScale_UpstreamServers/cpu-oss.png new file mode 100644 index 0000000000..0a6f1c7469 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_UpstreamServers/cpu-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_UpstreamServers/cpu-plus.png b/tests/results/scale/2.1.0/TestScale_UpstreamServers/cpu-plus.png new file mode 100644 index 0000000000..35c15b1654 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_UpstreamServers/cpu-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_UpstreamServers/memory-oss.png b/tests/results/scale/2.1.0/TestScale_UpstreamServers/memory-oss.png new file mode 100644 index 0000000000..f1c217c35b Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_UpstreamServers/memory-oss.png differ diff --git a/tests/results/scale/2.1.0/TestScale_UpstreamServers/memory-plus.png b/tests/results/scale/2.1.0/TestScale_UpstreamServers/memory-plus.png new file mode 100644 index 0000000000..39cb116d03 Binary files /dev/null and b/tests/results/scale/2.1.0/TestScale_UpstreamServers/memory-plus.png differ diff --git a/tests/results/scale/2.1.0/TestScale_UpstreamServers/ngf-oss.log b/tests/results/scale/2.1.0/TestScale_UpstreamServers/ngf-oss.log new file mode 100644 index 0000000000..877962793d --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_UpstreamServers/ngf-oss.log @@ -0,0 +1,4 @@ +{"level":"debug","ts":"2025-08-12T18:32:05Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gatewayclasses.gateway.networking.k8s.io \"nginx\": the object has been modified; please apply your changes to the latest version and try 
again","namespace":"","name":"nginx","kind":"GatewayClass"} +{"level":"debug","ts":"2025-08-12T18:32:40Z","logger":"eventLoop.eventHandler","msg":"failed to resolve endpoints, endpoints may not be ready","batchID":25,"error":"no endpoints found for Service scale/backend","service":{"name":"backend","namespace":"scale"}} +{"level":"debug","ts":"2025-08-12T18:32:40Z","logger":"eventLoop.eventHandler","msg":"failed to resolve endpoints, endpoints may not be ready","batchID":26,"error":"no valid endpoints found for Service scale/backend and port 80","service":{"name":"backend","namespace":"scale"}} +{"level":"debug","ts":"2025-08-12T18:32:41Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} diff --git a/tests/results/scale/2.1.0/TestScale_UpstreamServers/ngf-plus.log b/tests/results/scale/2.1.0/TestScale_UpstreamServers/ngf-plus.log new file mode 100644 index 0000000000..f052b05b90 --- /dev/null +++ b/tests/results/scale/2.1.0/TestScale_UpstreamServers/ngf-plus.log @@ -0,0 +1,3 @@ +{"level":"debug","ts":"2025-08-12T18:41:59Z","logger":"eventLoop.eventHandler","msg":"failed to resolve endpoints, endpoints may not be ready","batchID":24,"error":"no endpoints found for Service scale/backend","service":{"name":"backend","namespace":"scale"}} +{"level":"debug","ts":"2025-08-12T18:41:59Z","logger":"eventLoop.eventHandler","msg":"failed to resolve endpoints, endpoints may not be ready","batchID":25,"error":"no valid endpoints found for Service scale/backend and port 80","service":{"name":"backend","namespace":"scale"}} +{"level":"debug","ts":"2025-08-12T18:41:59Z","logger":"statusUpdater","msg":"Encountered error updating status","error":"Operation cannot be fulfilled on gateways.gateway.networking.k8s.io \"gateway\": the object has been modified; please apply your changes to the latest version and try again","namespace":"scale","name":"gateway","kind":"Gateway"} diff --git a/tests/results/zero-downtime-scale/2.1.0/2.1.0-oss.md b/tests/results/zero-downtime-scale/2.1.0/2.1.0-oss.md new file mode 100644 index 0000000000..df27ec6087 --- /dev/null +++ b/tests/results/zero-downtime-scale/2.1.0/2.1.0-oss.md @@ -0,0 +1,287 @@ +# Results + +## Test environment + +NGINX Plus: false + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Decreased latency in requests across all tests by an insignificant amount. 
+ +## One NGINX Pod runs per node Test Results + +### Scale Up Gradually + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 1.177ms +Latencies [min, mean, 50, 90, 95, 99, max] 271.226µs, 1.208ms, 1.128ms, 1.442ms, 1.565ms, 2.119ms, 248.605ms +Bytes In [total, mean] 4832720, 161.09 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 99.99% +Status Codes [code:count] 0:2 200:29998 +Error Set: +Get "http://cafe.example.com/coffee": dial tcp 0.0.0.0:0->10.138.0.14:80: connect: network is unreachable +``` + +![gradual-scale-up-affinity-http-oss.png](gradual-scale-up-affinity-http-oss.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 1.136ms +Latencies [min, mean, 50, 90, 95, 99, max] 236.862µs, 1.285ms, 1.181ms, 1.479ms, 1.605ms, 2.314ms, 252.017ms +Bytes In [total, mean] 4652901, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 0:1 200:29999 +Error Set: +Get "https://cafe.example.com/tea": dial tcp 0.0.0.0:0->10.138.0.14:443: connect: network is unreachable +``` + +![gradual-scale-up-affinity-https-oss.png](gradual-scale-up-affinity-https-oss.png) + +### Scale Down Gradually + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 48000, 100.00, 100.00 +Duration [total, attack, wait] 8m0s, 8m0s, 1.376ms +Latencies [min, mean, 50, 90, 95, 99, max] 642.962µs, 1.144ms, 1.134ms, 1.288ms, 1.351ms, 1.652ms, 35.611ms +Bytes In [total, mean] 7444806, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:48000 +Error Set: +``` + +![gradual-scale-down-affinity-https-oss.png](gradual-scale-down-affinity-https-oss.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 48000, 100.00, 100.00 +Duration [total, attack, wait] 8m0s, 8m0s, 1.04ms +Latencies [min, mean, 50, 90, 95, 99, max] 589.239µs, 1.086ms, 1.078ms, 1.241ms, 1.302ms, 1.6ms, 35.636ms +Bytes In [total, mean] 7732850, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:48000 +Error Set: +``` + +![gradual-scale-down-affinity-http-oss.png](gradual-scale-down-affinity-http-oss.png) + +### Scale Up Abruptly + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.166ms +Latencies [min, mean, 50, 90, 95, 99, max] 635.472µs, 1.14ms, 1.131ms, 1.287ms, 1.346ms, 1.604ms, 12.365ms +Bytes In [total, mean] 1861212, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-affinity-https-oss.png](abrupt-scale-up-affinity-https-oss.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.26ms +Latencies [min, mean, 50, 90, 95, 99, max] 602.076µs, 1.079ms, 1.076ms, 1.233ms, 1.285ms, 1.445ms, 12.338ms +Bytes In [total, mean] 1933235, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-affinity-http-oss.png](abrupt-scale-up-affinity-http-oss.png) + +### Scale Down Abruptly + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 
2m0s, 2m0s, 1.148ms +Latencies [min, mean, 50, 90, 95, 99, max] 657.765µs, 1.151ms, 1.135ms, 1.298ms, 1.365ms, 1.715ms, 62.444ms +Bytes In [total, mean] 1861272, 155.11 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-affinity-https-oss.png](abrupt-scale-down-affinity-https-oss.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.35ms +Latencies [min, mean, 50, 90, 95, 99, max] 580.673µs, 1.068ms, 1.062ms, 1.217ms, 1.266ms, 1.417ms, 63.405ms +Bytes In [total, mean] 1933160, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-affinity-http-oss.png](abrupt-scale-down-affinity-http-oss.png) + +## Multiple NGINX Pods run per node Test Results + +### Scale Up Gradually + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 976.371µs +Latencies [min, mean, 50, 90, 95, 99, max] 595.016µs, 1.121ms, 1.097ms, 1.312ms, 1.427ms, 1.887ms, 22.787ms +Bytes In [total, mean] 4833005, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +![gradual-scale-up-http-oss.png](gradual-scale-up-http-oss.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 1.557ms +Latencies [min, mean, 50, 90, 95, 99, max] 650.263µs, 1.184ms, 1.146ms, 1.349ms, 1.477ms, 1.986ms, 22.968ms +Bytes In [total, mean] 4652987, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +![gradual-scale-up-https-oss.png](gradual-scale-up-https-oss.png) + +### Scale Down Gradually + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 96000, 100.00, 100.00 +Duration [total, attack, wait] 16m0s, 16m0s, 1.199ms +Latencies [min, mean, 50, 90, 95, 99, max] 584.14µs, 1.14ms, 1.116ms, 1.331ms, 1.44ms, 1.887ms, 52.98ms +Bytes In [total, mean] 15465773, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:96000 +Error Set: +``` + +![gradual-scale-down-http-oss.png](gradual-scale-down-http-oss.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 96000, 100.00, 100.00 +Duration [total, attack, wait] 16m0s, 16m0s, 1.031ms +Latencies [min, mean, 50, 90, 95, 99, max] 634.745µs, 1.196ms, 1.169ms, 1.374ms, 1.471ms, 1.909ms, 39.725ms +Bytes In [total, mean] 14889673, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:96000 +Error Set: +``` + +![gradual-scale-down-https-oss.png](gradual-scale-down-https-oss.png) + +### Scale Up Abruptly + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.135ms +Latencies [min, mean, 50, 90, 95, 99, max] 664.466µs, 1.184ms, 1.173ms, 1.35ms, 1.418ms, 1.738ms, 10.866ms +Bytes In [total, mean] 1861227, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-https-oss.png](abrupt-scale-up-https-oss.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 
12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.245ms +Latencies [min, mean, 50, 90, 95, 99, max] 628.895µs, 1.111ms, 1.109ms, 1.287ms, 1.346ms, 1.633ms, 12.711ms +Bytes In [total, mean] 1933228, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-http-oss.png](abrupt-scale-up-http-oss.png) + +### Scale Down Abruptly + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.302ms +Latencies [min, mean, 50, 90, 95, 99, max] 658.812µs, 1.227ms, 1.176ms, 1.351ms, 1.42ms, 1.676ms, 152.248ms +Bytes In [total, mean] 1861205, 155.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-https-oss.png](abrupt-scale-down-https-oss.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.311ms +Latencies [min, mean, 50, 90, 95, 99, max] 635.464µs, 1.167ms, 1.119ms, 1.319ms, 1.4ms, 1.684ms, 150.548ms +Bytes In [total, mean] 1933174, 161.10 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-http-oss.png](abrupt-scale-down-http-oss.png) diff --git a/tests/results/zero-downtime-scale/2.1.0/2.1.0-plus.md b/tests/results/zero-downtime-scale/2.1.0/2.1.0-plus.md new file mode 100644 index 0000000000..5c692c433e --- /dev/null +++ b/tests/results/zero-downtime-scale/2.1.0/2.1.0-plus.md @@ -0,0 +1,288 @@ +# Results + +## Test environment + +NGINX Plus: true + +NGINX Gateway Fabric: + +- Commit: 43424f7eafc27e3fed07cef693614a7f389a3359 +- Date: 2025-08-12T16:35:50Z +- Dirty: false + +GKE Cluster: + +- Node count: 12 +- k8s version: v1.33.2-gke.1240000 +- vCPUs per node: 16 +- RAM per node: 65851524Ki +- Max pods per node: 110 +- Zone: us-west1-b +- Instance Type: n2d-standard-16 + +## Summary: + +- Latency in requests remained consistent with 2.0 results. +- 502 errors from the 2.0 results still remain, but have decreased in frequency (the success rate is higher and the number of tests with errors has decreased).
+ +## One NGINX Pod runs per node Test Results + +### Scale Up Gradually + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 1.125ms +Latencies [min, mean, 50, 90, 95, 99, max] 645.849µs, 1.172ms, 1.161ms, 1.333ms, 1.401ms, 1.687ms, 22.886ms +Bytes In [total, mean] 4628916, 154.30 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +![gradual-scale-up-affinity-https-plus.png](gradual-scale-up-affinity-https-plus.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 1.213ms +Latencies [min, mean, 50, 90, 95, 99, max] 586.397µs, 1.108ms, 1.098ms, 1.279ms, 1.338ms, 1.571ms, 23.119ms +Bytes In [total, mean] 4805911, 160.20 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +![gradual-scale-up-affinity-http-plus.png](gradual-scale-up-affinity-http-plus.png) + +### Scale Down Gradually + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 48000, 100.00, 100.00 +Duration [total, attack, wait] 8m0s, 8m0s, 1.204ms +Latencies [min, mean, 50, 90, 95, 99, max] 662.632µs, 1.16ms, 1.15ms, 1.313ms, 1.37ms, 1.605ms, 43.776ms +Bytes In [total, mean] 7406590, 154.30 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:48000 +Error Set: +``` + +![gradual-scale-down-affinity-https-plus.png](gradual-scale-down-affinity-https-plus.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 48000, 100.00, 100.00 +Duration [total, attack, wait] 8m0s, 8m0s, 1.295ms +Latencies [min, mean, 50, 90, 95, 99, max] 615.262µs, 1.104ms, 1.098ms, 1.271ms, 1.326ms, 1.531ms, 26.719ms +Bytes In [total, mean] 7689520, 160.20 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:48000 +Error Set: +``` + +![gradual-scale-down-affinity-http-plus.png](gradual-scale-down-affinity-http-plus.png) + +### Scale Up Abruptly + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.09ms +Latencies [min, mean, 50, 90, 95, 99, max] 596.91µs, 1.104ms, 1.092ms, 1.262ms, 1.318ms, 1.472ms, 75.02ms +Bytes In [total, mean] 1922425, 160.20 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-affinity-http-plus.png](abrupt-scale-up-affinity-http-plus.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.477ms +Latencies [min, mean, 50, 90, 95, 99, max] 659.422µs, 1.16ms, 1.144ms, 1.303ms, 1.36ms, 1.568ms, 80.463ms +Bytes In [total, mean] 1851563, 154.30 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-affinity-https-plus.png](abrupt-scale-up-affinity-https-plus.png) + +### Scale Down Abruptly + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.395ms +Latencies [min, mean, 50, 90, 95, 99, max] 687.681µs, 1.195ms, 1.189ms, 1.354ms, 1.415ms, 1.589ms, 42.939ms +Bytes In [total, mean] 1851631, 154.30 +Bytes Out [total, mean] 0, 0.00 +Success 
[ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-affinity-https-plus.png](abrupt-scale-down-affinity-https-plus.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 91.67 +Duration [total, attack, wait] 2m0s, 2m0s, 1.068ms +Latencies [min, mean, 50, 90, 95, 99, max] 398.026µs, 1.092ms, 1.119ms, 1.304ms, 1.362ms, 1.495ms, 42.972ms +Bytes In [total, mean] 1912272, 159.36 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 91.67% +Status Codes [code:count] 200:11000 502:1000 +Error Set: +502 Bad Gateway +``` + +![abrupt-scale-down-affinity-http-plus.png](abrupt-scale-down-affinity-http-plus.png) + +## Multiple NGINX Pods run per node Test Results + +### Scale Up Gradually + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 1.146ms +Latencies [min, mean, 50, 90, 95, 99, max] 665.252µs, 1.154ms, 1.141ms, 1.299ms, 1.362ms, 1.716ms, 30.787ms +Bytes In [total, mean] 4644007, 154.80 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +![gradual-scale-up-https-plus.png](gradual-scale-up-https-plus.png) + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 30000, 100.00, 100.00 +Duration [total, attack, wait] 5m0s, 5m0s, 950.679µs +Latencies [min, mean, 50, 90, 95, 99, max] 594.605µs, 1.106ms, 1.091ms, 1.268ms, 1.335ms, 1.644ms, 31.101ms +Bytes In [total, mean] 4817925, 160.60 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:30000 +Error Set: +``` + +![gradual-scale-up-http-plus.png](gradual-scale-up-http-plus.png) + +### Scale Down Gradually + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 96000, 100.00, 100.00 +Duration [total, attack, wait] 16m0s, 16m0s, 1.243ms +Latencies [min, mean, 50, 90, 95, 99, max] 592.946µs, 1.116ms, 1.105ms, 1.293ms, 1.366ms, 1.623ms, 49.022ms +Bytes In [total, mean] 15417717, 160.60 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:96000 +Error Set: +``` + +![gradual-scale-down-http-plus.png](gradual-scale-down-http-plus.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 96000, 100.00, 100.00 +Duration [total, attack, wait] 16m0s, 16m0s, 1.229ms +Latencies [min, mean, 50, 90, 95, 99, max] 635.568µs, 1.18ms, 1.167ms, 1.342ms, 1.417ms, 1.684ms, 50.865ms +Bytes In [total, mean] 14860641, 154.80 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:96000 +Error Set: +``` + +![gradual-scale-down-https-plus.png](gradual-scale-down-https-plus.png) + +### Scale Up Abruptly + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 91.67 +Duration [total, attack, wait] 2m0s, 2m0s, 1.23ms +Latencies [min, mean, 50, 90, 95, 99, max] 427.612µs, 1.133ms, 1.127ms, 1.336ms, 1.402ms, 1.562ms, 136.445ms +Bytes In [total, mean] 1916585, 159.72 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 91.67% +Status Codes [code:count] 200:11000 502:1000 +Error Set: +502 Bad Gateway +``` + +![abrupt-scale-up-http-plus.png](abrupt-scale-up-http-plus.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.151ms +Latencies [min, mean, 50, 90, 95, 99, max] 709.412µs, 1.247ms, 1.2ms, 
1.39ms, 1.463ms, 1.662ms, 149.538ms +Bytes In [total, mean] 1857573, 154.80 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-up-https-plus.png](abrupt-scale-up-https-plus.png) + +### Scale Down Abruptly + +#### Test: Send http /coffee traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.559ms +Latencies [min, mean, 50, 90, 95, 99, max] 612.804µs, 1.127ms, 1.124ms, 1.317ms, 1.374ms, 1.524ms, 17.905ms +Bytes In [total, mean] 1927261, 160.61 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-http-plus.png](abrupt-scale-down-http-plus.png) + +#### Test: Send https /tea traffic + +```text +Requests [total, rate, throughput] 12000, 100.01, 100.01 +Duration [total, attack, wait] 2m0s, 2m0s, 1.342ms +Latencies [min, mean, 50, 90, 95, 99, max] 684.64µs, 1.197ms, 1.187ms, 1.367ms, 1.432ms, 1.596ms, 44.068ms +Bytes In [total, mean] 1857584, 154.80 +Bytes Out [total, mean] 0, 0.00 +Success [ratio] 100.00% +Status Codes [code:count] 200:12000 +Error Set: +``` + +![abrupt-scale-down-https-plus.png](abrupt-scale-down-https-plus.png) diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-http-oss.png new file mode 100644 index 0000000000..8c1adfa112 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-http-plus.png new file mode 100644 index 0000000000..b8c1d1e727 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-https-oss.png new file mode 100644 index 0000000000..8c1adfa112 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-https-plus.png new file mode 100644 index 0000000000..f2be515543 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-affinity-https-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-http-oss.png new file mode 100644 index 0000000000..968d5535f7 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-http-plus.png new file mode 100644 index 0000000000..79318170c6 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-https-oss.png new file mode 100644 index 0000000000..968d5535f7 Binary files /dev/null and 
b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-https-plus.png new file mode 100644 index 0000000000..79318170c6 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-down-https-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-http-oss.png new file mode 100644 index 0000000000..7a79ba9128 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-http-plus.png new file mode 100644 index 0000000000..97f4c7c5b7 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-https-oss.png new file mode 100644 index 0000000000..7a79ba9128 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-https-plus.png new file mode 100644 index 0000000000..97f4c7c5b7 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-affinity-https-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-http-oss.png new file mode 100644 index 0000000000..d76fabb537 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-http-plus.png new file mode 100644 index 0000000000..635024f01b Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-https-oss.png new file mode 100644 index 0000000000..d76fabb537 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-https-plus.png new file mode 100644 index 0000000000..589db89cc3 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/abrupt-scale-up-https-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-http-oss.png new file mode 100644 index 0000000000..3e92aa6f36 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-http-plus.png new file mode 100644 index 
0000000000..78eb7b85b7 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-https-oss.png new file mode 100644 index 0000000000..3e92aa6f36 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-https-plus.png new file mode 100644 index 0000000000..78eb7b85b7 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-affinity-https-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-http-oss.png new file mode 100644 index 0000000000..56840d272b Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-http-plus.png new file mode 100644 index 0000000000..2f0c821709 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-https-oss.png new file mode 100644 index 0000000000..56840d272b Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-https-plus.png new file mode 100644 index 0000000000..2f0c821709 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-down-https-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-http-oss.png new file mode 100644 index 0000000000..95989cd931 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-http-plus.png new file mode 100644 index 0000000000..4f91b46d92 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-https-oss.png new file mode 100644 index 0000000000..95989cd931 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-https-plus.png new file mode 100644 index 0000000000..4f91b46d92 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-affinity-https-plus.png differ diff --git 
a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-http-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-http-oss.png new file mode 100644 index 0000000000..1137b4a76f Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-http-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-http-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-http-plus.png new file mode 100644 index 0000000000..21f4549057 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-http-plus.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-https-oss.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-https-oss.png new file mode 100644 index 0000000000..1137b4a76f Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-https-oss.png differ diff --git a/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-https-plus.png b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-https-plus.png new file mode 100644 index 0000000000..21f4549057 Binary files /dev/null and b/tests/results/zero-downtime-scale/2.1.0/gradual-scale-up-https-plus.png differ