From 3a717f02a6d1f22bf07dc90368bff0b0116df317 Mon Sep 17 00:00:00 2001
From: GioeleB00
Date: Sun, 3 Aug 2025 16:28:28 +0200
Subject: [PATCH 1/3] added the first method to the simulation runner

---
 src/app/runtime/actors/server.py     | 22 +++++++--
 src/app/runtime/simulation_runner.py | 69 ++++++++++++++++++++++++++++
 2 files changed, 86 insertions(+), 5 deletions(-)
 create mode 100644 src/app/runtime/simulation_runner.py

diff --git a/src/app/runtime/actors/server.py b/src/app/runtime/actors/server.py
index bf6b0b6..9051d99 100644
--- a/src/app/runtime/actors/server.py
+++ b/src/app/runtime/actors/server.py
@@ -29,18 +29,30 @@ class ServerRuntime:
     """class to define the server during the simulation"""

-    def __init__( # noqa: PLR0913
+    def __init__( # noqa: PLR0913
         self,
         *,
         env: simpy.Environment,
         server_resources: ServerContainers,
         server_config: Server,
-        out_edge: EdgeRuntime,
+        out_edge: EdgeRuntime | None,
         server_box: simpy.Store,
         settings: SimulationSettings,
         rng: np.random.Generator | None = None,
-    ) -> None:
-        """Docstring to complete"""
+    ) -> None:
+        """
+        Define the instance attributes of the server runtime.
+        Args:
+            env (simpy.Environment): SimPy environment.
+            server_resources (ServerContainers): resources defined in the
+                input for each server.
+            server_config (Server): parameters defining the server in the input.
+            out_edge (EdgeRuntime | None): edge connecting the server to the
+                next node; wired in after construction.
+            server_box (simpy.Store): box with the states that the server
+                should process.
+            settings (SimulationSettings): general input settings for the simulation.
+            rng (np.random.Generator | None, optional): random number generator.
+        """
         self.env = env
         self.server_resources = server_resources
         self.server_config = server_config
@@ -231,6 +243,7 @@ def _handle_request( # noqa: PLR0915, PLR0912, C901
             self._ram_in_use -= total_ram
             yield self.server_resources[ServerResourceName.RAM.value].put(total_ram)

+        assert self.out_edge is not None
         self.out_edge.transport(state)


@@ -263,7 +276,6 @@ def _dispatcher(self) -> Generator[simpy.Event, None, None]:
         The main dispatcher loop. It pulls requests from the inbox
         and spawns a new '_handle_request' process for each one.
""" - assert self.out_edge is not None while True: # Wait for a request to arrive in the server's inbox raw_state = yield self.server_box.get() diff --git a/src/app/runtime/simulation_runner.py b/src/app/runtime/simulation_runner.py new file mode 100644 index 0000000..3eadfb2 --- /dev/null +++ b/src/app/runtime/simulation_runner.py @@ -0,0 +1,69 @@ +"""Components to run the whole simulation given specific input data""" + +from typing import TYPE_CHECKING + +import numpy as np +import simpy + +from app.resources.registry import ResourcesRuntime +from app.runtime.actors.server import ServerRuntime +from app.schemas.full_simulation_input import SimulationPayload + +if TYPE_CHECKING: + from app.schemas.system_topology.full_system_topology import ( + Client, + LoadBalancer, + Server, +) + + + +class SimulationRunner: + """Class to handle the simulation""" + + def __init__( + self, + *, + env: simpy.Environment, + simulation_input: SimulationPayload, + ) -> None: + """Docstring to generate""" + self.env = env + self.simulation_input = simulation_input + + # instantiation of object needed to build nodes for the runtime phase + self.servers: list[Server] = simulation_input.topology_graph.nodes.servers + self.client: Client = simulation_input.topology_graph.nodes.client + self.lb: LoadBalancer | None = ( + simulation_input.topology_graph.nodes.load_balancer + ) + self.simulation_settings = simulation_input.sim_settings + self.rng = np.random.default_rng() + + # Object needed to start the simualation + self._servers_runtime: dict[str, ServerRuntime] = {} + + def _build_servers(self) -> dict[str, ServerRuntime]: + """ + Build given the input data a dict containing all server Runtime + indexed by their unique id + """ + registry = ResourcesRuntime( + env=self.env, + data=self.simulation_input.topology_graph, + ) + for server in self.servers: + container = registry[server.id] + self._servers_runtime[server.id] = ServerRuntime( + env=self.env, + server_resources=container, + server_config=server, + out_edge=None, + server_box=simpy.Store(self.env), + settings=self.simulation_settings, + rng= self.rng, + + ) + + + return self._servers_runtime From 8874e80c4ad1b026a5c3b3619b4fade38753bb29 Mon Sep 17 00:00:00 2001 From: GioeleB00 Date: Sun, 3 Aug 2025 19:23:28 +0200 Subject: [PATCH 2/3] added methods to handle nodes --- src/app/runtime/actors/rqs_generator.py | 2 +- src/app/runtime/simulation_runner.py | 92 ++++++++++++++++++++++--- 2 files changed, 82 insertions(+), 12 deletions(-) diff --git a/src/app/runtime/actors/rqs_generator.py b/src/app/runtime/actors/rqs_generator.py index b666d5d..d9e7e38 100644 --- a/src/app/runtime/actors/rqs_generator.py +++ b/src/app/runtime/actors/rqs_generator.py @@ -35,7 +35,7 @@ def __init__( self, *, env: simpy.Environment, - out_edge: EdgeRuntime, + out_edge: EdgeRuntime | None, rqs_generator_data: RqsGeneratorInput, sim_settings: SimulationSettings, rng: np.random.Generator | None = None, diff --git a/src/app/runtime/simulation_runner.py b/src/app/runtime/simulation_runner.py index 3eadfb2..d76327f 100644 --- a/src/app/runtime/simulation_runner.py +++ b/src/app/runtime/simulation_runner.py @@ -6,15 +6,19 @@ import simpy from app.resources.registry import ResourcesRuntime +from app.runtime.actors.client import ClientRuntime +from app.runtime.actors.load_balancer import LoadBalancerRuntime +from app.runtime.actors.rqs_generator import RqsGeneratorRuntime from app.runtime.actors.server import ServerRuntime from app.schemas.full_simulation_input import SimulationPayload if 
TYPE_CHECKING: + from app.schemas.rqs_generator_input import RqsGeneratorInput from app.schemas.system_topology.full_system_topology import ( - Client, - LoadBalancer, - Server, -) + Client, + LoadBalancer, + Server, + ) @@ -27,21 +31,68 @@ def __init__( env: simpy.Environment, simulation_input: SimulationPayload, ) -> None: - """Docstring to generate""" + """ + Orchestrates building, wiring and running all actor runtimes. + + Args: + env (simpy.Environment): global environment for the simulation + simulation_input (SimulationPayload): full input for the simulation + + """ self.env = env self.simulation_input = simulation_input # instantiation of object needed to build nodes for the runtime phase self.servers: list[Server] = simulation_input.topology_graph.nodes.servers self.client: Client = simulation_input.topology_graph.nodes.client - self.lb: LoadBalancer | None = ( - simulation_input.topology_graph.nodes.load_balancer - ) + self.rqs_generator: RqsGeneratorInput = simulation_input.rqs_input + self.lb: LoadBalancer | None = None self.simulation_settings = simulation_input.sim_settings self.rng = np.random.default_rng() # Object needed to start the simualation self._servers_runtime: dict[str, ServerRuntime] = {} + self._client_runtime: dict[str, ClientRuntime] = {} + self._rqs_runtime: dict[str, RqsGeneratorRuntime] = {} + self._lb_runtime: dict[str, LoadBalancerRuntime] = {} + + def _make_inbox(self) -> simpy.Store: # local helper + """Helper to create store for the states of the simulation""" + return simpy.Store(self.env) + + def _build_rqs_generator(self) -> dict[str, RqsGeneratorRuntime]: + """ + Build the rqs generator runtime, we use a dict for one reason + In the future we might add CDN so we will need + multiple generators , one for each client + """ + self._rqs_runtime[self.rqs_generator.id] = RqsGeneratorRuntime( + env = self.env, + out_edge=None, + rqs_generator_data=self.rqs_generator, + sim_settings=self.simulation_settings, + rng=self.rng, + ) + + return self._rqs_runtime + + def _build_client_runtime(self) -> dict[str, ClientRuntime]: + """ + Build the client runtime, we use a dict for two reasons + 1) In the future we might add CDN so we will need + multiple client + 2) When we will assign outer edges we will need a dict + with all components indexed by their id + """ + self._client_runtime[self.client.id] = ClientRuntime( + env=self.env, + out_edge=None, + completed_box=self._make_inbox(), + client_box=self._make_inbox(), + client_config=self.client, + ) + + return self._client_runtime def _build_servers(self) -> dict[str, ServerRuntime]: """ @@ -59,11 +110,30 @@ def _build_servers(self) -> dict[str, ServerRuntime]: server_resources=container, server_config=server, out_edge=None, - server_box=simpy.Store(self.env), + server_box=self._make_inbox(), settings=self.simulation_settings, rng= self.rng, ) - - return self._servers_runtime + + def _build_load_balancer(self) -> dict[str, LoadBalancerRuntime]: + """ + Build given the input data the load balancer runtime we will + use a dict because we may have multiple load balancer and we + will be usefull to assign outer edges + """ + assert self.simulation_input.topology_graph.nodes.load_balancer is not None + self.lb = self.simulation_input.topology_graph.nodes.load_balancer + + self._lb_runtime[self.lb.id] = LoadBalancerRuntime( + env=self.env, + lb_config=self.lb, + out_edges= None, + lb_box=self._make_inbox(), + ) + + return self._lb_runtime + + + From 1e86d96c5806c125b483c8d04823ffa40f298bcb Mon Sep 17 00:00:00 2001 From: 
GioeleB00 Date: Wed, 6 Aug 2025 16:22:11 +0200 Subject: [PATCH 3/3] completed simulation runtime + integration test --- poetry.lock | 75 ++++++++- pyproject.toml | 2 + src/app/metrics/analyzer.py | 6 +- src/app/runtime/simulation_runner.py | 196 ++++++++++++++++++++-- tests/integration/conftest.py | 46 +++++ tests/integration/minimal/conftest.py | 80 +++++++++ tests/integration/minimal/test_minimal.py | 100 +++++++++++ 7 files changed, 491 insertions(+), 14 deletions(-) create mode 100644 tests/integration/conftest.py create mode 100644 tests/integration/minimal/conftest.py create mode 100644 tests/integration/minimal/test_minimal.py diff --git a/poetry.lock b/poetry.lock index 543faa8..f698f6c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1076,6 +1076,68 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + [[package]] name = "ruff" version = "0.12.5" @@ -1125,6 +1187,17 @@ files = [ {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250516" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +files = [ + {file = "types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"}, + {file = "types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"}, +] + [[package]] name = "typing-extensions" version = "4.14.1" @@ -1153,4 +1226,4 @@ typing-extensions = ">=4.12.0" [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "1441494d6783804e41cd06a1a630c2031b47573ba6f28619d4967e27953aff40" +content-hash = "b7b303ed9df0a73da71790803bfde02b8dc243871801686bb7a3127ca3f638aa" diff --git a/pyproject.toml b/pyproject.toml index 1528987..22fcaa2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ pydantic = {extras = ["email"], version = "^2.11.7"} numpy = "^2.3.1" simpy = "^4.1.1" matplotlib = "^3.10.3" +pyyaml = "^6.0.2" [tool.poetry.group.dev.dependencies] pytest = "^8.4.1" @@ -23,6 +24,7 @@ pytest-asyncio = "^1.0.0" pytest-cov = "^6.2.1" mypy = "^1.16.1" ruff = "^0.12.1" +types-pyyaml = "^6.0.12.20250516" [build-system] requires = ["poetry-core"] diff --git a/src/app/metrics/analyzer.py b/src/app/metrics/analyzer.py index 07a021e..762e5c6 100644 --- a/src/app/metrics/analyzer.py +++ b/src/app/metrics/analyzer.py @@ -158,9 +158,11 @@ def get_latency_stats(self) -> dict[LatencyKey, float]: return self.latency_stats or {} def 
get_throughput_series(self) -> tuple[list[float], list[float]]:
-        """Return throughput time series (timestamps, RPS)."""
+        """Return (timestamps, RPS). Empty lists when no traffic."""
         self.process_all_metrics()
-        assert self.throughput_series is not None
+        if self.throughput_series is None:
+            return [], []
+
         return self.throughput_series

     def get_sampled_metrics(self) -> dict[str, dict[str, list[float]]]:
diff --git a/src/app/runtime/simulation_runner.py b/src/app/runtime/simulation_runner.py
index d76327f..e2b5c6f 100644
--- a/src/app/runtime/simulation_runner.py
+++ b/src/app/runtime/simulation_runner.py
@@ -1,26 +1,44 @@
 """Components to run the whole simulation given specific input data"""

-from typing import TYPE_CHECKING
+from itertools import chain
+from pathlib import Path
+from typing import TYPE_CHECKING, Protocol, cast

 import numpy as np
 import simpy
+import yaml

+from app.metrics.analyzer import ResultsAnalyzer
+from app.metrics.collector import SampledMetricCollector
 from app.resources.registry import ResourcesRuntime
 from app.runtime.actors.client import ClientRuntime
+from app.runtime.actors.edge import EdgeRuntime
 from app.runtime.actors.load_balancer import LoadBalancerRuntime
 from app.runtime.actors.rqs_generator import RqsGeneratorRuntime
 from app.runtime.actors.server import ServerRuntime
 from app.schemas.full_simulation_input import SimulationPayload

 if TYPE_CHECKING:
+    from collections.abc import Iterable
+
     from app.schemas.rqs_generator_input import RqsGeneratorInput
     from app.schemas.system_topology.full_system_topology import (
         Client,
+        Edge,
         LoadBalancer,
         Server,
     )

+# --- PROTOCOL DEFINITION ---
+# This is the contract that all runtime actors must follow. It tells
+# mypy that any object typed as ``Startable`` exposes a ``start()``
+# method, so heterogeneous runtimes can be started in a single loop.
+class Startable(Protocol):
+    """A protocol for runtime actors that can be started."""
+    def start(self) -> simpy.Process:
+        """Starts the main process loop for the actor."""
+        ...
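The loop in `_start_all_processes` later relies on exactly this structural typing. Below is a minimal, self-contained sketch of the pattern, assuming only `simpy` is installed; `DummyActor` is a hypothetical stand-in, not a class from this repository:

```python
from collections.abc import Iterable
from itertools import chain
from typing import Protocol, cast

import simpy


class Startable(Protocol):
    """Anything whose start() registers a SimPy process."""

    def start(self) -> simpy.Process: ...


class DummyActor:
    """Hypothetical stand-in for a runtime actor (not from the codebase)."""

    def __init__(self, env: simpy.Environment) -> None:
        self.env = env

    def _loop(self):
        while True:
            yield self.env.timeout(1)  # tick once per simulated time unit

    def start(self) -> simpy.Process:
        return self.env.process(self._loop())


env = simpy.Environment()
generators = {"gen-1": DummyActor(env)}
clients = {"client-1": DummyActor(env)}

# chain() walks both dict views without building a temporary list;
# cast() only tells mypy that every element satisfies Startable.
for actor in cast(Iterable[Startable], chain(generators.values(), clients.values())):
    actor.start()

env.run(until=5)
```

No runtime actor has to inherit from `Startable`; satisfying the signature is enough, and the `cast` has no runtime cost.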
class SimulationRunner: """Class to handle the simulation""" @@ -48,6 +66,7 @@ def __init__( self.rqs_generator: RqsGeneratorInput = simulation_input.rqs_input self.lb: LoadBalancer | None = None self.simulation_settings = simulation_input.sim_settings + self.edges: list[Edge] = simulation_input.topology_graph.edges self.rng = np.random.default_rng() # Object needed to start the simualation @@ -55,12 +74,18 @@ def __init__( self._client_runtime: dict[str, ClientRuntime] = {} self._rqs_runtime: dict[str, RqsGeneratorRuntime] = {} self._lb_runtime: dict[str, LoadBalancerRuntime] = {} + self._edges_runtime: dict[tuple[str, str], EdgeRuntime] = {} + + + # ------------------------------------------------------------------ # + # Private: build phase # + # ------------------------------------------------------------------ # def _make_inbox(self) -> simpy.Store: # local helper """Helper to create store for the states of the simulation""" return simpy.Store(self.env) - def _build_rqs_generator(self) -> dict[str, RqsGeneratorRuntime]: + def _build_rqs_generator(self) -> None: """ Build the rqs generator runtime, we use a dict for one reason In the future we might add CDN so we will need @@ -74,9 +99,8 @@ def _build_rqs_generator(self) -> dict[str, RqsGeneratorRuntime]: rng=self.rng, ) - return self._rqs_runtime - def _build_client_runtime(self) -> dict[str, ClientRuntime]: + def _build_client(self) -> None: """ Build the client runtime, we use a dict for two reasons 1) In the future we might add CDN so we will need @@ -92,9 +116,8 @@ def _build_client_runtime(self) -> dict[str, ClientRuntime]: client_config=self.client, ) - return self._client_runtime - def _build_servers(self) -> dict[str, ServerRuntime]: + def _build_servers(self) -> None: """ Build given the input data a dict containing all server Runtime indexed by their unique id @@ -115,25 +138,176 @@ def _build_servers(self) -> dict[str, ServerRuntime]: rng= self.rng, ) - return self._servers_runtime - def _build_load_balancer(self) -> dict[str, LoadBalancerRuntime]: + + def _build_load_balancer(self) -> None: """ Build given the input data the load balancer runtime we will use a dict because we may have multiple load balancer and we will be usefull to assign outer edges """ - assert self.simulation_input.topology_graph.nodes.load_balancer is not None + # Topologies without a LB are perfectly legal (e.g. the “minimal” + # integration test). Early-return instead of asserting. 
+ if self.simulation_input.topology_graph.nodes.load_balancer is None: + return + self.lb = self.simulation_input.topology_graph.nodes.load_balancer self._lb_runtime[self.lb.id] = LoadBalancerRuntime( env=self.env, lb_config=self.lb, - out_edges= None, + out_edges= [], lb_box=self._make_inbox(), ) - return self._lb_runtime + + def _build_edges(self) -> None: + """Initialization of the edges runtime dictionary from the input data""" + # We need to merge all previous dictionary for the nodes to assign + # for each edge the correct target box + all_nodes: dict[str, object] = { + **self._servers_runtime, + **self._client_runtime, + **self._lb_runtime, + **self._rqs_runtime, +} + + for edge in self.edges: + + target_object = all_nodes[edge.target] # O(1) lookup + + if isinstance(target_object, ServerRuntime): + target_box = target_object.server_box + elif isinstance(target_object, ClientRuntime): + target_box = target_object.client_box + elif isinstance(target_object, LoadBalancerRuntime): + target_box = target_object.lb_box + else: + msg = f"Unknown runtime for {edge.target!r}" + raise TypeError(msg) + + self._edges_runtime[(edge.source, edge.target)] = ( + EdgeRuntime( + env=self.env, + edge_config=edge, + rng=self.rng, + target_box= target_box, + settings=self.simulation_settings, + ) + ) + # Here we assign the outer edges to all nodes + source_object = all_nodes[edge.source] + + if isinstance(source_object, ( + ServerRuntime, + ClientRuntime, + RqsGeneratorRuntime, + )): + source_object.out_edge = self._edges_runtime[( + edge.source, + edge.target) + ] + elif isinstance(source_object, LoadBalancerRuntime): + assert source_object.out_edges is not None + source_object.out_edges.append(self._edges_runtime[( + edge.source, + edge.target, + ) + ]) + + else: + msg = f"Unknown runtime for {edge.source!r}" + raise TypeError(msg) + + + # ------------------------------------------------------------------ # + # RUN phase # + # ------------------------------------------------------------------ # + def _start_all_processes(self) -> None: + """Register every .start() in the environment.""" + # ------------------------------------------------------------------ + # Start every actor's main coroutine + # + # * itertools.chain lazily stitches together the four dict_views + # into ONE iterator. No temporary list is built, zero extra + # allocations, yet the for-loop stays single and readable. + # * Order matters only for determinism, so we keep the natural + # “generator → client → servers → LB” sequence by listing the + # dicts explicitly. + # * Alternative ( list(a)+list(b)+… ) would copy thousands of + # references just to throw them away after the loop - wasteful. 
+ # ------------------------------------------------------------------ + + runtimes = chain( + self._rqs_runtime.values(), + self._client_runtime.values(), + self._servers_runtime.values(), + self._lb_runtime.values(), + ) + + # Here we are saying to mypy that those object are of + # the startable type and they share the start method + for rt in cast("Iterable[Startable]", runtimes): + rt.start() + + def _start_metric_collector(self) -> None: + """One coroutine that snapshots RAM / queues / connections.""" + SampledMetricCollector( + edges=list(self._edges_runtime.values()), + servers=list(self._servers_runtime.values()), + env=self.env, + sim_settings=self.simulation_settings, + ).start() + + # ------------------------------------------------------------------ # + # Public entry-point # + # ------------------------------------------------------------------ # + def run(self) -> ResultsAnalyzer: + """Build → wire → start → run the clock → return `ResultsAnalyzer`""" + # 1. BUILD + self._build_rqs_generator() + self._build_client() + self._build_servers() + self._build_load_balancer() + + # 2. WIRE + self._build_edges() + + # 3. START ALL COROUTINES + self._start_all_processes() + self._start_metric_collector() + + # 4. ADVANCE THE SIMULATION + self.env.run(until=self.simulation_settings.total_simulation_time) + + return ResultsAnalyzer( + client=next(iter(self._client_runtime.values())), + servers=list(self._servers_runtime.values()), + edges=list(self._edges_runtime.values()), + settings=self.simulation_settings, + ) + + # ------------------------------------------------------------------ # + # Convenience constructor (load from YAML) # + # ------------------------------------------------------------------ # + @classmethod + def from_yaml( + cls, + *, + env: simpy.Environment, + yaml_path: str | Path, + ) -> "SimulationRunner": + """ + Quick helper so that integration tests & CLI can do: + + ```python + runner = SimulationRunner.from_yaml(env, "scenario.yml") + results = runner.run() + ``` + """ + data = yaml.safe_load(Path(yaml_path).read_text()) + payload = SimulationPayload.model_validate(data) + return cls(env=env, simulation_input=payload) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000..b46300b --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,46 @@ +"""Shared fixtures used by several integration-test groups.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest +import simpy + +from app.runtime.simulation_runner import SimulationRunner + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + + +# --------------------------------------------------------------------------- # +# Environment # +# --------------------------------------------------------------------------- # +@pytest.fixture +def env() -> simpy.Environment: + """A fresh SimPy environment per test.""" + return simpy.Environment() + + +# --------------------------------------------------------------------------- # +# Runner factory (load YAML scenarios) # +# --------------------------------------------------------------------------- # +@pytest.fixture +def make_runner( + env: simpy.Environment, +) -> Callable[[str | Path], SimulationRunner]: + """ + Factory that loads a YAML scenario and instantiates a + :class:`SimulationRunner`. 
+ + Usage inside a test:: + + runner = make_runner("scenarios/minimal.yml") + results = runner.run() + """ + + def _factory(yaml_path: str | Path) -> SimulationRunner: + return SimulationRunner.from_yaml(env=env, yaml_path=yaml_path) + + return _factory diff --git a/tests/integration/minimal/conftest.py b/tests/integration/minimal/conftest.py new file mode 100644 index 0000000..1c9f81e --- /dev/null +++ b/tests/integration/minimal/conftest.py @@ -0,0 +1,80 @@ +""" +Local fixtures for the *minimal* integration scenario. + +We **do not** add any Edge to the TopologyGraph because the core schema +forbids generator-origin edges. Instead we patch the single +`RqsGeneratorRuntime` after the `SimulationRunner` is built, giving it a +*no-op* EdgeRuntime so its internal assertion passes. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest +import simpy + +from app.config.constants import TimeDefaults +from app.runtime.simulation_runner import SimulationRunner +from app.schemas.random_variables_config import RVConfig +from app.schemas.rqs_generator_input import RqsGeneratorInput + +if TYPE_CHECKING: + from app.schemas.full_simulation_input import SimulationPayload + from app.schemas.full_simulation_input import ( + SimulationPayload as _Payload, # noqa: F401 + ) + from app.schemas.rqs_generator_input import ( + RqsGeneratorInput as _RqsIn, # noqa: F401 + ) + +# ────────────────────────────────────────────────────────────────────────────── +# 0-traffic generator (shadows the project-wide fixture) +# ────────────────────────────────────────────────────────────────────────────── +@pytest.fixture(scope="session") +def rqs_input() -> RqsGeneratorInput: + """A generator that never emits any request.""" + return RqsGeneratorInput( + id="rqs-zero", + avg_active_users=RVConfig(mean=0.0), + avg_request_per_minute_per_user=RVConfig(mean=0.0), + user_sampling_window=TimeDefaults.USER_SAMPLING_WINDOW, + ) + + +# ────────────────────────────────────────────────────────────────────────────── +# SimPy env - local to this directory +# ────────────────────────────────────────────────────────────────────────────── +@pytest.fixture +def env() -> simpy.Environment: + """Fresh environment per test module.""" + return simpy.Environment() + + +class _NoOpEdge: + """EdgeRuntime stand-in that simply discards every state.""" + + def transport(self, _state: object) -> None: # ANN001: _state annotated + return # swallow the request silently + + +# ────────────────────────────────────────────────────────────────────────────── +# Runner factory - assigns the dummy edge *after* building the runner +# ────────────────────────────────────────────────────────────────────────────── +@pytest.fixture +def runner( + env: simpy.Environment, + payload_base: SimulationPayload, +) -> SimulationRunner: + """Build a `SimulationRunner` and patch the generator's `out_edge`.""" + sim_runner = SimulationRunner(env=env, simulation_input=payload_base) + + def _patch_noop_edge(r: SimulationRunner) -> None: + + gen_rt = next(iter(r._rqs_runtime.values())) # noqa: SLF001 + gen_rt.out_edge = _NoOpEdge() # type: ignore[assignment] + + + sim_runner._patch_noop_edge = _patch_noop_edge # type: ignore[attr-defined] # noqa: SLF001 + + return sim_runner diff --git a/tests/integration/minimal/test_minimal.py b/tests/integration/minimal/test_minimal.py new file mode 100644 index 0000000..0ae77c2 --- /dev/null +++ b/tests/integration/minimal/test_minimal.py @@ -0,0 +1,100 @@ +""" +Smoke-test: the **smallest** valid 
topology boots, ticks and
+shuts down without recording any metric.
+
+Topology under test
+-------------------
+generator ──Ø── client (Ø == no real EdgeRuntime)
+
+The request-generator cannot emit messages because its ``out_edge`` is
+replaced by a no-op stub. The client is patched the same way so its own
+forwarder never attempts a network send.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+import simpy
+
+from app.metrics.analyzer import ResultsAnalyzer
+from app.runtime.simulation_runner import SimulationRunner
+
+if TYPE_CHECKING:
+    from app.schemas.full_simulation_input import SimulationPayload
+
+
+# --------------------------------------------------------------------------- #
+# Helpers                                                                     #
+# --------------------------------------------------------------------------- #
+
+class _NoOpEdge:
+    """Edge stub: swallows every transport call."""
+
+    def transport(self, _state: object) -> None:
+        # Nothing to do - we just black-hole the message.
+        return
+
+
+# --------------------------------------------------------------------------- #
+# Local fixtures                                                              #
+# --------------------------------------------------------------------------- #
+@pytest.fixture
+def env() -> simpy.Environment:
+    """Fresh SimPy environment for this test file."""
+    return simpy.Environment()
+
+
+@pytest.fixture
+def runner(
+    env: simpy.Environment,
+    payload_base: SimulationPayload,  # comes from project-wide conftest
+) -> SimulationRunner:
+    """SimulationRunner already loaded with *minimal* payload."""
+    return SimulationRunner(env=env, simulation_input=payload_base)
+
+
+# --------------------------------------------------------------------------- #
+# Tests                                                                       #
+# --------------------------------------------------------------------------- #
+def test_smoke_minimal_runs(runner: SimulationRunner) -> None:
+    """
+    The simulation should:
+
+    * start without any server or edge,
+    * execute its clock,
+    * leave all metric collections empty.
+    """
+    # ── 1. Build generator + patch its edge ──────────────────────────────
+    runner._build_rqs_generator()  # noqa: SLF001 - private builder ok in test
+    gen_rt = next(iter(runner._rqs_runtime.values()))  # noqa: SLF001
+    gen_rt.out_edge = _NoOpEdge()  # type: ignore[assignment]
+
+    # ── 2. Build client + patch its edge ─────────────────────────────────
+    runner._build_client()  # noqa: SLF001
+    cli_rt = next(iter(runner._client_runtime.values()))  # noqa: SLF001
+    cli_rt.out_edge = _NoOpEdge()  # type: ignore[assignment]
+
+    # ── 3. Start all processes (no servers / no LB were built) ──────────
+    runner._start_all_processes()  # noqa: SLF001
+    runner._start_metric_collector()  # noqa: SLF001
+
+    # ── 4. Run the clock ─────────────────────────────────────────────────
+    runner.env.run(until=runner.simulation_settings.total_simulation_time)
+
+    # ── 5. Post-processing - everything must be empty ────────────────────
+    results: ResultsAnalyzer = ResultsAnalyzer(
+        client=cli_rt,
+        servers=[],  # none built
+        edges=[],  # none built
+        settings=runner.simulation_settings,
+    )
+
+    # No latencies were produced
+    assert results.get_latency_stats() == {}
+    # Throughput time-series must be entirely empty
+    timestamps, rps = results.get_throughput_series()
+    assert timestamps == []
+    assert rps == []
+    # No sampled metrics either
+    assert results.get_sampled_metrics() == {}
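For completeness, a hedged end-to-end usage sketch of the entry point this series adds; `scenario.yml` is a placeholder path whose contents must validate against `SimulationPayload` (topology_graph, rqs_input, sim_settings), and the accessors are the ones exercised in the tests above:

```python
from pathlib import Path

import simpy

from app.runtime.simulation_runner import SimulationRunner

env = simpy.Environment()
# from_yaml() loads the YAML, validates it into a SimulationPayload and
# returns a ready-to-run SimulationRunner (arguments are keyword-only).
runner = SimulationRunner.from_yaml(env=env, yaml_path=Path("scenario.yml"))

# run() builds the actors, wires the edges, starts every process and
# advances the SimPy clock up to sim_settings.total_simulation_time.
results = runner.run()

print(results.get_latency_stats())               # {} if no request completed
timestamps, rps = results.get_throughput_series()
print(f"{len(timestamps)} throughput samples")
```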