
Commit 1a87a0b

cleanup
1 parent 9da33ea commit 1a87a0b

File tree

1 file changed: +64 -94 lines changed


services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py

Lines changed: 64 additions & 94 deletions
@@ -258,32 +258,28 @@ async def create_services_batch(
     create_task_reservations: Callable[[int, int], dict[str, Any]],
     service_monitored_labels: dict[DockerLabelKey, str],
     osparc_docker_label_keys: StandardSimcoreDockerLabels,
-) -> Callable[[Resources, InstanceTypeType | None, int], Awaitable[list[Service]]]:
-    async def _(
-        service_resources: Resources,
-        imposed_instance_type: InstanceTypeType | None,
-        num_services: int,
-    ) -> list[Service]:
+) -> Callable[[_ScaleUpParams], Awaitable[list[Service]]]:
+    async def _(scale_up_params: _ScaleUpParams) -> list[Service]:
         return await asyncio.gather(
             *(
                 create_service(
                     task_template
                     | create_task_reservations(
-                        int(service_resources.cpus),
-                        service_resources.ram,
+                        int(scale_up_params.service_resources.cpus),
+                        scale_up_params.service_resources.ram,
                     ),
                     service_monitored_labels
                     | osparc_docker_label_keys.to_simcore_runtime_docker_labels(),
                     "pending",
                     (
                         [
-                            f"node.labels.{DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY}=={imposed_instance_type}"
+                            f"node.labels.{DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY}=={scale_up_params.imposed_instance_type}"
                         ]
-                        if imposed_instance_type
+                        if scale_up_params.imposed_instance_type
                         else []
                     ),
                 )
-                for _ in range(num_services)
+                for _ in range(scale_up_params.num_services)
             )
         )

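The new fixture signature funnels everything through a single _ScaleUpParams parameter object. Its definition lives earlier in this test module and is outside the diff; judging from the fields referenced here and in the parametrizations below, it is presumably a small frozen dataclass along these lines (a sketch only; the import paths and dataclass options are assumptions, not part of the commit):

# Hypothetical reconstruction of _ScaleUpParams; the real definition is not shown in this diff.
from dataclasses import dataclass

from aws_library.ec2 import Resources  # assumed import path
from types_aiobotocore_ec2.literals import InstanceTypeType


@dataclass(frozen=True, kw_only=True)
class _ScaleUpParams:
    imposed_instance_type: InstanceTypeType | None  # forces a placement constraint when set
    service_resources: Resources  # CPU/RAM reserved by each created service
    num_services: int  # how many identical services to create
    expected_instance_type: InstanceTypeType  # what the autoscaler is expected to start
    expected_num_instances: int  # how many instances it is expected to start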
@@ -411,28 +407,34 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect
 )


+@pytest.mark.parametrize(
+    "scale_up_params",
+    [
+        pytest.param(
+            _ScaleUpParams(
+                imposed_instance_type=None,
+                service_resources=Resources(
+                    cpus=4, ram=TypeAdapter(ByteSize).validate_python("128000Gib")
+                ),
+                num_services=1,
+                expected_instance_type="r5n.4xlarge",
+                expected_num_instances=1,
+            ),
+            id="No explicit instance defined",
+        ),
+    ],
+)
 async def test_cluster_scaling_with_service_asking_for_too_much_resources_starts_nothing(
     minimal_configuration: None,
-    service_monitored_labels: dict[DockerLabelKey, str],
     app_settings: ApplicationSettings,
     initialized_app: FastAPI,
-    create_service: Callable[
-        [dict[str, Any], dict[DockerLabelKey, str], str], Awaitable[Service]
-    ],
-    task_template: dict[str, Any],
-    create_task_reservations: Callable[[int, int], dict[str, Any]],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     mock_launch_instances: mock.Mock,
     mock_terminate_instances: mock.Mock,
     mock_rabbitmq_post_message: mock.Mock,
+    scale_up_params: _ScaleUpParams,
 ):
-    task_template_with_too_many_resource = task_template | create_task_reservations(
-        1000, 0
-    )
-    await create_service(
-        task_template_with_too_many_resource,
-        service_monitored_labels,
-        "pending",
-    )
+    await create_services_batch(scale_up_params)

     await auto_scale_cluster(
         app=initialized_app, auto_scaling_mode=DynamicAutoscaling()
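For context, the single parametrization above reserves 4 CPUs and 128000 GiB of RAM per service, which no EC2 instance type can provide, so the autoscaler is expected to start nothing. A quick, illustrative check of what that figure amounts to (pydantic is already a dependency of these tests; the snippet itself is not part of the commit):

# Illustrative only: how pydantic interprets the parametrized RAM reservation.
from pydantic import ByteSize, TypeAdapter

ram = TypeAdapter(ByteSize).validate_python("128000Gib")
print(int(ram))  # 137438953472000 bytes, i.e. 128000 GiB == 125 TiB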
@@ -448,9 +450,7 @@ async def _test_cluster_scaling_up_and_down( # noqa: PLR0915
     *,
     app_settings: ApplicationSettings,
     initialized_app: FastAPI,
-    create_services_batch: Callable[
-        [Resources, str | None, int], Awaitable[list[Service]]
-    ],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     ec2_client: EC2Client,
     mock_docker_tag_node: mock.Mock,
     fake_node: Node,
@@ -476,11 +476,7 @@ async def _test_cluster_scaling_up_and_down( # noqa: PLR0915
     ), "This test is not made to work with more than 1 expected instance. so please adapt if needed"

     # create the service(s)
-    created_docker_services = await create_services_batch(
-        scale_up_params.service_resources,
-        scale_up_params.imposed_instance_type,
-        scale_up_params.num_services,
-    )
+    created_docker_services = await create_services_batch(scale_up_params)

     # this should trigger a scaling up as we have no nodes
     await auto_scale_cluster(
@@ -953,9 +949,7 @@ async def test_cluster_scaling_up_and_down(
     minimal_configuration: None,
     app_settings: ApplicationSettings,
     initialized_app: FastAPI,
-    create_services_batch: Callable[
-        [Resources, str | None, int], Awaitable[list[Service]]
-    ],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     ec2_client: EC2Client,
     mock_docker_tag_node: mock.Mock,
     fake_node: Node,
@@ -1023,9 +1017,7 @@ async def test_cluster_scaling_up_and_down_against_aws(
     external_envfile_dict: EnvVarsDict,
     app_settings: ApplicationSettings,
     initialized_app: FastAPI,
-    create_services_batch: Callable[
-        [Resources, str | None, int], Awaitable[list[Service]]
-    ],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     ec2_client: EC2Client,
     mock_docker_tag_node: mock.Mock,
     fake_node: Node,
@@ -1105,9 +1097,7 @@ async def test_cluster_scaling_up_starts_multiple_instances(
     minimal_configuration: None,
     app_settings: ApplicationSettings,
     initialized_app: FastAPI,
-    create_services_batch: Callable[
-        [Resources, str | None, int], Awaitable[list[Service]]
-    ],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     ec2_client: EC2Client,
     mock_docker_tag_node: mock.Mock,
     scale_up_params: _ScaleUpParams,
@@ -1122,11 +1112,7 @@ async def test_cluster_scaling_up_starts_multiple_instances(
     assert not all_instances["Reservations"]

     # create several tasks that needs more power
-    await create_services_batch(
-        scale_up_params.service_resources,
-        scale_up_params.imposed_instance_type,
-        scale_up_params.num_services,
-    )
+    await create_services_batch(scale_up_params)

     # run the code
     await auto_scale_cluster(
@@ -1198,9 +1184,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915
     ec2_client: EC2Client,
     initialized_app: FastAPI,
     app_settings: ApplicationSettings,
-    create_services_batch: Callable[
-        [Resources, str | None, int], Awaitable[list[Service]]
-    ],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     ec2_instance_custom_tags: dict[str, str],
     instance_type_filters: Sequence[FilterTypeDef],
     async_docker_client: aiodocker.Docker,
@@ -1226,11 +1210,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915

     #
     # 1. create the first batch of services requiring the initial machines
-    first_batch_services = await create_services_batch(
-        scale_up_params1.service_resources,
-        scale_up_params1.imposed_instance_type,
-        scale_up_params1.num_services,
-    )
+    first_batch_services = await create_services_batch(scale_up_params1)

     # it will only scale once and do nothing else
     await auto_scale_cluster(
@@ -1276,11 +1256,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915

     #
     # 3. now we start the second batch of services requiring a different type of machines
-    await create_services_batch(
-        scale_up_params2.service_resources,
-        scale_up_params2.imposed_instance_type,
-        scale_up_params2.num_services,
-    )
+    await create_services_batch(scale_up_params2)

     # scaling will do nothing since we have hit the maximum number of machines
     for _ in range(3):
@@ -1467,37 +1443,36 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915


 @pytest.mark.parametrize(
-    "docker_service_imposed_ec2_type, docker_service_ram, expected_ec2_type",
+    "scale_up_params",
     [
         pytest.param(
-            None,
-            TypeAdapter(ByteSize).validate_python("128Gib"),
-            "r5n.4xlarge",
+            _ScaleUpParams(
+                imposed_instance_type=None,
+                service_resources=Resources(
+                    cpus=4, ram=TypeAdapter(ByteSize).validate_python("128Gib")
+                ),
+                num_services=1,
+                expected_instance_type="r5n.4xlarge",
+                expected_num_instances=1,
+            ),
             id="No explicit instance defined",
         ),
     ],
 )
 async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted(
     with_short_ec2_instances_max_start_time: EnvVarsDict,
     minimal_configuration: None,
-    service_monitored_labels: dict[DockerLabelKey, str],
     app_settings: ApplicationSettings,
     initialized_app: FastAPI,
-    create_service: Callable[
-        [dict[str, Any], dict[DockerLabelKey, str], str, list[str]], Awaitable[Service]
-    ],
-    task_template: dict[str, Any],
-    create_task_reservations: Callable[[int, int], dict[str, Any]],
+    create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]],
     ec2_client: EC2Client,
-    docker_service_imposed_ec2_type: InstanceTypeType | None,
-    docker_service_ram: ByteSize,
-    expected_ec2_type: InstanceTypeType,
     mock_find_node_with_name_returns_none: mock.Mock,
     mock_docker_tag_node: mock.Mock,
     mock_rabbitmq_post_message: mock.Mock,
     short_ec2_instance_max_start_time: datetime.timedelta,
     ec2_instance_custom_tags: dict[str, str],
     instance_type_filters: Sequence[FilterTypeDef],
+    scale_up_params: _ScaleUpParams,
 ):
     assert app_settings.AUTOSCALING_EC2_INSTANCES
     assert (
@@ -1507,19 +1482,8 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted(
     # we have nothing running now
     all_instances = await ec2_client.describe_instances()
     assert not all_instances["Reservations"]
-    # create a service
-    await create_service(
-        task_template | create_task_reservations(4, docker_service_ram),
-        service_monitored_labels,
-        "pending",
-        (
-            [
-                f"node.labels.{DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY}=={ docker_service_imposed_ec2_type}"
-            ]
-            if docker_service_imposed_ec2_type
-            else []
-        ),
-    )
+    await create_services_batch(scale_up_params)
+
     # this should trigger a scaling up as we have no nodes
     await auto_scale_cluster(
         app=initialized_app, auto_scaling_mode=DynamicAutoscaling()
@@ -1529,8 +1493,8 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted(
     instances = await assert_autoscaled_dynamic_ec2_instances(
         ec2_client,
         expected_num_reservations=1,
-        expected_num_instances=1,
-        expected_instance_type=expected_ec2_type,
+        expected_num_instances=scale_up_params.expected_num_instances,
+        expected_instance_type=scale_up_params.expected_instance_type,
         expected_instance_state="running",
         expected_additional_tag_keys=list(ec2_instance_custom_tags),
         instance_filters=instance_type_filters,
15451509
app_settings,
15461510
initialized_app,
15471511
instances_running=0,
1548-
instances_pending=1,
1512+
instances_pending=scale_up_params.expected_num_instances,
15491513
)
15501514
mock_rabbitmq_post_message.reset_mock()
15511515

@@ -1572,8 +1536,8 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted(
     instances = await assert_autoscaled_dynamic_ec2_instances(
         ec2_client,
         expected_num_reservations=1,
-        expected_num_instances=1,
-        expected_instance_type=expected_ec2_type,
+        expected_num_instances=scale_up_params.expected_num_instances,
+        expected_instance_type=scale_up_params.expected_instance_type,
         expected_instance_state="running",
         expected_additional_tag_keys=list(ec2_instance_custom_tags),
         instance_filters=instance_type_filters,
15851549
app_settings,
15861550
initialized_app,
15871551
instances_running=0,
1588-
instances_pending=1,
1552+
instances_pending=scale_up_params.expected_num_instances,
15891553
)
15901554
mock_rabbitmq_post_message.reset_mock()
15911555
assert instances
@@ -1616,7 +1580,10 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted(
     all_instances = await ec2_client.describe_instances()
     assert len(all_instances["Reservations"]) == 2
     assert "Instances" in all_instances["Reservations"][0]
-    assert len(all_instances["Reservations"][0]["Instances"]) == 1
+    assert (
+        len(all_instances["Reservations"][0]["Instances"])
+        == scale_up_params.expected_num_instances
+    )
     assert "State" in all_instances["Reservations"][0]["Instances"][0]
     assert "Name" in all_instances["Reservations"][0]["Instances"][0]["State"]
     assert (
16251592
)
16261593

16271594
assert "Instances" in all_instances["Reservations"][1]
1628-
assert len(all_instances["Reservations"][1]["Instances"]) == 1
1595+
assert (
1596+
len(all_instances["Reservations"][1]["Instances"])
1597+
== scale_up_params.expected_num_instances
1598+
)
16291599
assert "State" in all_instances["Reservations"][1]["Instances"][0]
16301600
assert "Name" in all_instances["Reservations"][1]["Instances"][0]["State"]
16311601
assert (
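The nested assertions at the end simply walk the standard EC2 DescribeInstances response shape. Roughly, with placeholder values (the concrete states asserted are truncated out of this diff):

# Shape of ec2_client.describe_instances() as walked by the assertions above; values are placeholders.
all_instances = {
    "Reservations": [
        {"Instances": [{"InstanceType": "r5n.4xlarge", "State": {"Name": "terminated"}}]},
        {"Instances": [{"InstanceType": "r5n.4xlarge", "State": {"Name": "pending"}}]},
    ]
}
assert len(all_instances["Reservations"]) == 2
assert len(all_instances["Reservations"][0]["Instances"]) == 1  # scale_up_params.expected_num_instances
assert all_instances["Reservations"][0]["Instances"][0]["State"]["Name"] == "terminated"  # placeholder state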
