diff --git a/.devcontainer/onCreateCommand.sh b/.devcontainer/onCreateCommand.sh
index cbbc87a8e1..1a61fc2582 100755
--- a/.devcontainer/onCreateCommand.sh
+++ b/.devcontainer/onCreateCommand.sh
@@ -8,4 +8,4 @@ poetry install --no-interaction --no-ansi
 
 git submodule update --init
 
-invoke demo.pull
+poetry run invoke demo.pull
diff --git a/.devcontainer/postCreateCommand.sh b/.devcontainer/postCreateCommand.sh
index a574523175..be741d5d0b 100755
--- a/.devcontainer/postCreateCommand.sh
+++ b/.devcontainer/postCreateCommand.sh
@@ -2,4 +2,4 @@
 
 git pull
 git submodule update
-invoke demo.start --wait
+poetry run invoke demo.start --wait
diff --git a/.devcontainer/updateContentCommand.sh b/.devcontainer/updateContentCommand.sh
index eb522f6425..9a5d83499d 100755
--- a/.devcontainer/updateContentCommand.sh
+++ b/.devcontainer/updateContentCommand.sh
@@ -1,12 +1,12 @@
 #!/bin/bash
 
 export WEB_CONCURRENCY=2
-invoke demo.start
+poetry run invoke demo.start
 sleep 120
 docker logs infrahub-server-1
-invoke demo.load-infra-schema
+poetry run invoke demo.load-infra-schema
 docker logs infrahub-server-1
 sleep 90
 docker logs infrahub-server-1
-invoke demo.load-infra-data
-invoke demo.stop
+poetry run invoke demo.load-infra-data
+poetry run invoke demo.stop
diff --git a/backend/infrahub/computed_attribute/tasks.py b/backend/infrahub/computed_attribute/tasks.py
index d4f2cd0b04..f9baabbc5a 100644
--- a/backend/infrahub/computed_attribute/tasks.py
+++ b/backend/infrahub/computed_attribute/tasks.py
@@ -2,6 +2,7 @@
 
 from typing import TYPE_CHECKING
 
+from infrahub_sdk.exceptions import URLNotFoundError
 from infrahub_sdk.protocols import CoreTransformPython
 from infrahub_sdk.template import Jinja2Template
 from prefect import flow
@@ -229,7 +230,13 @@ async def process_jinja2(
 
     for id_filter in computed_macro.node_filters:
         query = attribute_graphql.render_graphql_query(query_filter=id_filter, filter_id=object_id)
-        response = await client.execute_graphql(query=query, branch_name=branch_name)
+        try:
+            response = await client.execute_graphql(query=query, branch_name=branch_name)
+        except URLNotFoundError:
+            log.warning(
+                f"Process computed attributes for {computed_attribute_kind}.{computed_attribute_name} failed for branch {branch_name} (not found)"
+            )
+            return
 
         output = attribute_graphql.parse_response(response=response)
         found.extend(output)
diff --git a/backend/infrahub/core/graph/__init__.py b/backend/infrahub/core/graph/__init__.py
index 1ab4ff997c..6456fb4273 100644
--- a/backend/infrahub/core/graph/__init__.py
+++ b/backend/infrahub/core/graph/__init__.py
@@ -1 +1 @@
-GRAPH_VERSION = 42
+GRAPH_VERSION = 43
diff --git a/backend/infrahub/core/migrations/__init__.py b/backend/infrahub/core/migrations/__init__.py
index b177618576..2de37a15b7 100644
--- a/backend/infrahub/core/migrations/__init__.py
+++ b/backend/infrahub/core/migrations/__init__.py
@@ -1,5 +1,6 @@
 from .schema.attribute_kind_update import AttributeKindUpdateMigration
 from .schema.attribute_name_update import AttributeNameUpdateMigration
+from .schema.attribute_supports_profile import AttributeSupportsProfileUpdateMigration
 from .schema.node_attribute_add import NodeAttributeAddMigration
 from .schema.node_attribute_remove import NodeAttributeRemoveMigration
 from .schema.node_kind_update import NodeKindUpdateMigration
@@ -19,6 +20,8 @@
     "attribute.name.update": AttributeNameUpdateMigration,
     "attribute.branch.update": None,
     "attribute.kind.update": AttributeKindUpdateMigration,
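+    # toggling optional or read_only changes AttributeSchema.support_profiles, so both map to the same migration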
"attribute.read_only.update": AttributeSupportsProfileUpdateMigration, "relationship.branch.update": None, "relationship.direction.update": None, "relationship.identifier.update": PlaceholderDummyMigration, diff --git a/backend/infrahub/core/migrations/graph/__init__.py b/backend/infrahub/core/migrations/graph/__init__.py index ea38937581..c7e661973d 100644 --- a/backend/infrahub/core/migrations/graph/__init__.py +++ b/backend/infrahub/core/migrations/graph/__init__.py @@ -41,9 +41,10 @@ from .m037_index_attr_vals import Migration037 from .m038_redo_0000_prefix_fix import Migration038 from .m039_ipam_reconcile import Migration039 -from .m040_profile_attrs_in_db import Migration040 -from .m041_create_hfid_display_label_in_db import Migration041 -from .m042_backfill_hfid_display_label_in_db import Migration042 +from .m040_duplicated_attributes import Migration040 +from .m041_profile_attrs_in_db import Migration041 +from .m042_create_hfid_display_label_in_db import Migration042 +from .m043_backfill_hfid_display_label_in_db import Migration043 if TYPE_CHECKING: from infrahub.core.root import Root @@ -93,6 +94,7 @@ Migration040, Migration041, Migration042, + Migration043, ] diff --git a/backend/infrahub/core/migrations/graph/m013_convert_git_password_credential.py b/backend/infrahub/core/migrations/graph/m013_convert_git_password_credential.py index 97196a9d18..350ca73316 100644 --- a/backend/infrahub/core/migrations/graph/m013_convert_git_password_credential.py +++ b/backend/infrahub/core/migrations/graph/m013_convert_git_password_credential.py @@ -286,7 +286,7 @@ def __init__(self, **kwargs: Any): kwargs.pop("branch", None) super().__init__( - node_kind="CoreGenericRepository", + node_kinds=["CoreGenericRepository"], attribute_name="internal_status", attribute_kind="Dropdown", branch_support=BranchSupportType.LOCAL.value, diff --git a/backend/infrahub/core/migrations/graph/m040_duplicated_attributes.py b/backend/infrahub/core/migrations/graph/m040_duplicated_attributes.py new file mode 100644 index 0000000000..46a600463b --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m040_duplicated_attributes.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Sequence + +from infrahub.core.migrations.shared import MigrationResult +from infrahub.core.query import Query, QueryType + +from ..shared import GraphMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class DeleteDuplicatedAttributesQuery(Query): + name: str = "delete_duplicated_attributes" + type: QueryType = QueryType.WRITE + insert_return: bool = False + insert_limit: bool = False + + async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: # noqa: ARG002 + query = """ +// ------------- +// get all the Nodes linked to multiple Attributes with the same name to drastically reduce the search space +// ------------- +MATCH (n:Node)-[:HAS_ATTRIBUTE]->(attr:Attribute) +WITH DISTINCT n, attr +WITH n, attr.name AS attr_name, count(*) AS num_attrs +WHERE num_attrs > 1 +// ------------- +// for each Node-attr_name pair, get the possible duplicate Attributes +// ------------- +MATCH (n)-[:HAS_ATTRIBUTE]->(dup_attr:Attribute {name: attr_name}) +WITH DISTINCT n, dup_attr +// ------------- +// get the branch(es) for each possible duplicate Attribute +// ------------- +CALL (n, dup_attr) { + MATCH (n)-[r:HAS_ATTRIBUTE {status: "active"}]->(dup_attr) + WHERE r.to IS NULL + AND NOT exists((n)-[:HAS_ATTRIBUTE {status: "deleted", branch: 
r.branch}]->(dup_attr)) + RETURN r.branch AS branch +} +// ------------- +// get the latest update time for each duplicate Attribute on each branch +// ------------- +CALL (dup_attr, branch) { + MATCH (dup_attr)-[r {branch: branch}]-() + RETURN max(r.from) AS latest_update +} +// ------------- +// order the duplicate Attributes by latest update time +// ------------- +WITH n, dup_attr, branch, latest_update +ORDER BY n, branch, dup_attr.name, latest_update DESC +// ------------- +// for any Node-dup_attr_name pairs with multiple duplicate Attributes, keep the Attribute with the latest update +// on this branch and delete all the other edges on this branch for this Attribute +// ------------- +WITH n, branch, dup_attr.name AS dup_attr_name, collect(dup_attr) AS dup_attrs_reverse_chronological +WHERE size(dup_attrs_reverse_chronological) > 1 +WITH branch, tail(dup_attrs_reverse_chronological) AS dup_attrs_to_delete +UNWIND dup_attrs_to_delete AS dup_attr_to_delete +MATCH (dup_attr_to_delete)-[r {branch: branch}]-() +DELETE r +// ------------- +// delete any orphaned Attributes +// ------------- +WITH DISTINCT dup_attr_to_delete +WHERE NOT exists((dup_attr_to_delete)--()) +DELETE dup_attr_to_delete + """ + self.add_to_query(query) + + +class Migration040(GraphMigration): + name: str = "040_duplicated_attributes" + queries: Sequence[type[Query]] = [DeleteDuplicatedAttributesQuery] + minimum_version: int = 39 + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: # noqa: ARG002 + return MigrationResult() diff --git a/backend/infrahub/core/migrations/graph/m040_profile_attrs_in_db.py b/backend/infrahub/core/migrations/graph/m041_profile_attrs_in_db.py similarity index 98% rename from backend/infrahub/core/migrations/graph/m040_profile_attrs_in_db.py rename to backend/infrahub/core/migrations/graph/m041_profile_attrs_in_db.py index c922933a60..612559f4bd 100644 --- a/backend/infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +++ b/backend/infrahub/core/migrations/graph/m041_profile_attrs_in_db.py @@ -84,7 +84,7 @@ def get_node_ids_by_branch(self) -> dict[str, set[str]]: return nodes_by_branch -class Migration040(ArbitraryMigration): +class Migration041(ArbitraryMigration): """ Save profile attribute values on each node using the profile in the database For any profile that has updates on a given branch (including default branch) @@ -93,8 +93,8 @@ class Migration040(ArbitraryMigration): - run NodeProfilesApplier.apply_profiles on the node on that branch """ - name: str = "040_profile_attrs_in_db" - minimum_version: int = 39 + name: str = "041_profile_attrs_in_db" + minimum_version: int = 40 def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) diff --git a/backend/infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py b/backend/infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py similarity index 96% rename from backend/infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py rename to backend/infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py index 6c57a5ae84..d605a4971a 100644 --- a/backend/infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +++ b/backend/infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py @@ -17,9 +17,9 @@ from infrahub.database import InfrahubDatabase -class Migration041(InternalSchemaMigration): - name: str = "041_create_hfid_display_label_in_db" - minimum_version: int = 40 +class 
+    name: str = "042_create_hfid_display_label_in_db"
+    minimum_version: int = 41
 
     @classmethod
     def init(cls, **kwargs: Any) -> Self:
diff --git a/backend/infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py b/backend/infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py
similarity index 96%
rename from backend/infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py
rename to backend/infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py
index 2c72ddecdc..676eb2ae41 100644
--- a/backend/infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py
+++ b/backend/infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py
@@ -18,13 +18,13 @@
     from infrahub.database import InfrahubDatabase
 
 
-class Migration042(ArbitraryMigration):
+class Migration043(ArbitraryMigration):
     """
     Backfill `human_friendly_id` and `display_label` attributes for nodes with schemas that define them.
     """
 
-    name: str = "042_backfill_hfid_display_label_in_db"
-    minimum_version: int = 41
+    name: str = "043_backfill_hfid_display_label_in_db"
+    minimum_version: int = 42
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
diff --git a/backend/infrahub/core/migrations/query/__init__.py b/backend/infrahub/core/migrations/query/__init__.py
index 36179d3f3b..06755630bf 100644
--- a/backend/infrahub/core/migrations/query/__init__.py
+++ b/backend/infrahub/core/migrations/query/__init__.py
@@ -8,7 +8,12 @@
 from ..shared import AttributeSchemaMigration, SchemaMigration
 
 
-class MigrationQuery(Query):
+class MigrationBaseQuery(Query):
+    def get_nbr_migrations_executed(self) -> int:
+        return self.num_of_results
+
+
+class MigrationQuery(MigrationBaseQuery):
     type: QueryType = QueryType.WRITE
 
     def __init__(
@@ -19,11 +24,8 @@ def __init__(
         self.migration = migration
         super().__init__(**kwargs)
 
-    def get_nbr_migrations_executed(self) -> int:
-        return self.num_of_results
-
 
-class AttributeMigrationQuery(Query):
+class AttributeMigrationQuery(MigrationBaseQuery):
     type: QueryType = QueryType.WRITE
 
     def __init__(
@@ -33,6 +35,3 @@ def __init__(
     ):
         self.migration = migration
         super().__init__(**kwargs)
-
-    def get_nbr_migrations_executed(self) -> int:
-        return self.num_of_results
diff --git a/backend/infrahub/core/migrations/query/attribute_add.py b/backend/infrahub/core/migrations/query/attribute_add.py
index 647e884c39..e5d1446b07 100644
--- a/backend/infrahub/core/migrations/query/attribute_add.py
+++ b/backend/infrahub/core/migrations/query/attribute_add.py
@@ -17,14 +17,14 @@ class AttributeAddQuery(Query):
 
     def __init__(
         self,
-        node_kind: str,
+        node_kinds: list[str],
         attribute_name: str,
         attribute_kind: str,
         branch_support: str,
         default_value: Any | None = None,
         **kwargs: Any,
     ) -> None:
-        self.node_kind = node_kind
+        self.node_kinds = node_kinds
         self.attribute_name = attribute_name
         self.attribute_kind = attribute_kind
         self.branch_support = branch_support
@@ -36,7 +36,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No
         branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string())
         self.params.update(branch_params)
 
-        self.params["node_kind"] = self.node_kind
+        self.params["node_kinds"] = self.node_kinds
         self.params["attr_name"] = self.attribute_name
         self.params["branch_support"] = self.branch_support
         self.params["current_time"] = self.at.to_string()
@@ -79,12 +79,13 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No
             LIMIT 1
         """ % {"attr_value_label": attr_value_label}
 
+        node_kinds_str = "|".join(self.node_kinds)
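+        # joining with "|" builds a Cypher label disjunction: (n:KindA|KindB) matches nodes with either label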
         query = """
         %(match_query)s
         MERGE (is_protected_value:Boolean { value: $is_protected_default })
         MERGE (is_visible_value:Boolean { value: $is_visible_default })
         WITH av, is_protected_value, is_visible_value
-        MATCH p = (n:%(node_kind)s)
+        MATCH (n:%(node_kinds_str)s)
         CALL (n) {
             MATCH (:Root)<-[r:IS_PART_OF]-(n)
             WHERE %(branch_filter)s
@@ -110,7 +111,7 @@ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> No
         """ % {
             "match_query": match_query,
             "branch_filter": branch_filter,
-            "node_kind": self.node_kind,
+            "node_kinds_str": node_kinds_str,
             "uuid_generation": db.render_uuid_generation(node_label="a", node_attr="uuid"),
         }
diff --git a/backend/infrahub/core/migrations/query/attribute_remove.py b/backend/infrahub/core/migrations/query/attribute_remove.py
new file mode 100644
index 0000000000..b4826773a7
--- /dev/null
+++ b/backend/infrahub/core/migrations/query/attribute_remove.py
@@ -0,0 +1,134 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from infrahub.core.constants import RelationshipStatus
+from infrahub.core.graph.schema import GraphAttributeRelationships
+from infrahub.core.query import Query
+from infrahub.core.schema.generic_schema import GenericSchema
+
+if TYPE_CHECKING:
+    from pydantic.fields import FieldInfo
+
+    from infrahub.database import InfrahubDatabase
+
+
+class AttributeRemoveQuery(Query):
+    name = "attribute_remove"
+    insert_return: bool = False
+
+    def __init__(
+        self,
+        attribute_name: str,
+        node_kinds: list[str],
+        **kwargs: Any,
+    ) -> None:
+        self.attribute_name = attribute_name
+        self.node_kinds = node_kinds
+        super().__init__(**kwargs)
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string())
+        self.params.update(branch_params)
+
+        kinds_to_ignore = []
+        profile_kinds_to_update = []
+
+        for node_kind in self.node_kinds:
+            new_schema = db.schema.get(name=node_kind, branch=self.branch, duplicate=False)
+
+            if isinstance(new_schema, GenericSchema):
+                for inheriting_schema_kind in new_schema.used_by:
+                    node_schema = db.schema.get_node_schema(
+                        name=inheriting_schema_kind, branch=self.branch, duplicate=False
+                    )
+                    attr_schema = node_schema.get_attribute_or_none(name=self.attribute_name)
+                    if attr_schema and not attr_schema.inherited:
+                        kinds_to_ignore.append(inheriting_schema_kind)
+                    else:
+                        profile_kinds_to_update.append(f"Profile{inheriting_schema_kind}")
+
+        self.params["kinds_to_ignore"] = kinds_to_ignore
+        self.params["attr_name"] = self.attribute_name
+        self.params["current_time"] = self.at.to_string()
+        self.params["branch_name"] = self.branch.name
+
+        self.params["rel_props"] = {
+            "branch": self.branch.name,
+            "branch_level": self.branch.hierarchy_level,
+            "status": RelationshipStatus.DELETED.value,
+            "from": self.at.to_string(),
+        }
+
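+        # render one CREATE per relationship type so each tombstone edge keeps the direction of the original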
+        def render_sub_query_per_rel_type(rel_type: str, rel_def: FieldInfo) -> str:
+            subquery = [
+                "WITH peer_node, rb, active_attr",
+                f'WHERE type(rb) = "{rel_type}"',
+            ]
+            if rel_def.default.direction.value == "outbound":
+                subquery.append(f"CREATE (active_attr)-[:{rel_type} $rel_props ]->(peer_node)")
+            elif rel_def.default.direction.value == "inbound":
+                subquery.append(f"CREATE (active_attr)<-[:{rel_type} $rel_props ]-(peer_node)")
+            else:
+                subquery.append(f"CREATE (active_attr)-[:{rel_type} $rel_props ]-(peer_node)")
+
+            subquery.append("RETURN peer_node as p2")
+            return "\n".join(subquery)
+
+        sub_queries = [
+            render_sub_query_per_rel_type(rel_type, rel_def)
+            for rel_type, rel_def in GraphAttributeRelationships.model_fields.items()
+        ]
+        sub_query_all = "\nUNION\n".join(sub_queries)
+
+        node_kinds_str = "|".join(self.node_kinds + profile_kinds_to_update)
+        query = """
+    // Find all the active nodes
+    MATCH (node:%(node_kinds)s)
+    WHERE (size($kinds_to_ignore) = 0 OR NOT any(l IN labels(node) WHERE l IN $kinds_to_ignore))
+    AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name }))
+    CALL (node) {
+        MATCH (root:Root)<-[r:IS_PART_OF]-(node)
+        WHERE %(branch_filter)s
+        RETURN node as n1, r as r1
+        ORDER BY r.branch_level DESC, r.from DESC
+        LIMIT 1
+    }
+    WITH n1 as active_node, r1 as rb
+    WHERE rb.status = "active"
+    // Find all the attributes that need to be updated
+    CALL (active_node) {
+        MATCH (active_node)-[r:HAS_ATTRIBUTE]-(attr:Attribute { name: $attr_name })
+        WHERE %(branch_filter)s
+        RETURN active_node as n1, r as r1, attr as attr1
+        ORDER BY r.branch_level DESC, r.from DESC
+        LIMIT 1
+    }
+    WITH n1 as active_node, r1 as rb, attr1 as active_attr
+    WHERE rb.status = "active"
+    WITH active_attr
+    MATCH (active_attr)-[]-(peer)
+    WITH DISTINCT active_attr, peer
+    CALL (active_attr, peer) {
+        MATCH (active_attr)-[r]-(peer)
+        WHERE %(branch_filter)s
+        RETURN active_attr as a1, r as r1, peer as p1
+        ORDER BY r.branch_level DESC, r.from DESC
+        LIMIT 1
+    }
+    WITH a1 as active_attr, r1 as rb, p1 as peer_node
+    WHERE rb.status = "active"
+    CALL (peer_node, rb, active_attr) {
+        %(sub_query_all)s
+    }
+    WITH p2 as peer_node, rb, active_attr
+    FOREACH (i in CASE WHEN rb.branch = $branch_name THEN [1] ELSE [] END |
+        SET rb.to = $current_time
+    )
+    RETURN DISTINCT active_attr
+    """ % {
+        "branch_filter": branch_filter,
+        "sub_query_all": sub_query_all,
+        "node_kinds": node_kinds_str,
+    }
+        self.add_to_query(query)
diff --git a/backend/infrahub/core/migrations/schema/attribute_kind_update.py b/backend/infrahub/core/migrations/schema/attribute_kind_update.py
index f9a0f19e0f..636a46ae7a 100644
--- a/backend/infrahub/core/migrations/schema/attribute_kind_update.py
+++ b/backend/infrahub/core/migrations/schema/attribute_kind_update.py
@@ -4,7 +4,7 @@
 
 from infrahub.types import is_large_attribute_type
 
-from ..query import AttributeMigrationQuery
+from ..query import AttributeMigrationQuery, MigrationBaseQuery
 from ..shared import AttributeSchemaMigration, MigrationResult
 
 if TYPE_CHECKING:
@@ -147,10 +147,16 @@ class AttributeKindUpdateMigration(AttributeSchemaMigration):
     name: str = "attribute.kind.update"
     queries: Sequence[type[AttributeMigrationQuery]] = [AttributeKindUpdateMigrationQuery]  # type: ignore[assignment]
 
-    async def execute(self, db: InfrahubDatabase, branch: Branch, at: Timestamp | str | None = None) -> MigrationResult:
+    async def execute(
+        self,
+        db: InfrahubDatabase,
+        branch: Branch,
+        at: Timestamp | str | None = None,
+        queries: Sequence[type[MigrationBaseQuery]] | None = None,
+    ) -> MigrationResult:
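+        # only migrate when the kind change flips values between indexed and non-indexed storage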
         is_indexed_previous = is_large_attribute_type(self.previous_attribute_schema.kind)
         is_indexed_new = is_large_attribute_type(self.new_attribute_schema.kind)
 
         if is_indexed_previous is is_indexed_new:
             return MigrationResult()
 
-        return await super().execute(db=db, branch=branch, at=at)
+        return await super().execute(db=db, branch=branch, at=at, queries=queries)
diff --git a/backend/infrahub/core/migrations/schema/attribute_supports_profile.py b/backend/infrahub/core/migrations/schema/attribute_supports_profile.py
new file mode 100644
index 0000000000..5b3d855c5e
--- /dev/null
+++ b/backend/infrahub/core/migrations/schema/attribute_supports_profile.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from infrahub.core.migrations.query.attribute_remove import AttributeRemoveQuery
+from infrahub.core.schema.generic_schema import GenericSchema
+from infrahub.core.schema.node_schema import NodeSchema
+
+from ..query import AttributeMigrationQuery, MigrationBaseQuery
+from ..query.attribute_add import AttributeAddQuery
+from ..shared import AttributeSchemaMigration, MigrationResult
+
+if TYPE_CHECKING:
+    from infrahub.core.branch.models import Branch
+    from infrahub.core.schema import MainSchemaTypes
+    from infrahub.core.timestamp import Timestamp
+    from infrahub.database import InfrahubDatabase
+
+
+def _get_node_kinds(schema: MainSchemaTypes) -> list[str]:
+    if not isinstance(schema, (NodeSchema, GenericSchema)):
+        return [schema.kind]
+    schema_kinds = [f"Profile{schema.kind}"]
+    if isinstance(schema, GenericSchema) and schema.used_by:
+        schema_kinds += [f"Profile{kind}" for kind in schema.used_by]
+    return schema_kinds
+
+
+class ProfilesAttributeAddMigrationQuery(AttributeMigrationQuery, AttributeAddQuery):
+    name = "migration_profiles_attribute_add"
+
+    def __init__(
+        self,
+        migration: AttributeSchemaMigration,
+        **kwargs: Any,
+    ):
+        node_kinds = _get_node_kinds(migration.new_schema)
+        super().__init__(
+            migration=migration,
+            node_kinds=node_kinds,
+            attribute_name=migration.new_attribute_schema.name,
+            attribute_kind=migration.new_attribute_schema.kind,
+            branch_support=migration.new_attribute_schema.get_branch().value,
+            default_value=migration.new_attribute_schema.default_value,
+            **kwargs,
+        )
+
+
+class ProfilesAttributeRemoveMigrationQuery(AttributeMigrationQuery, AttributeRemoveQuery):
+    name = "migration_profiles_attribute_remove"
+
+    def __init__(
+        self,
+        migration: AttributeSchemaMigration,
+        **kwargs: Any,
+    ):
+        node_kinds = _get_node_kinds(migration.new_schema)
+        super().__init__(
+            migration=migration,
+            attribute_name=migration.new_attribute_schema.name,
+            node_kinds=node_kinds,
+            **kwargs,
+        )
+
+
+class AttributeSupportsProfileUpdateMigration(AttributeSchemaMigration):
+    name: str = "attribute.supports_profile.update"
+    queries: Sequence[type[MigrationBaseQuery]] = []
+
+    async def execute(
+        self,
+        db: InfrahubDatabase,
+        branch: Branch,
+        at: Timestamp | str | None = None,
+        queries: Sequence[type[MigrationBaseQuery]] | None = None,  # noqa: ARG002
+    ) -> MigrationResult:
+        if (
+            # no change in whether the attribute should be used on profiles
+            self.previous_attribute_schema.support_profiles == self.new_attribute_schema.support_profiles
+            # the attribute is new, so there cannot be existing profiles to update
+            or self.previous_attribute_schema.id is None
+        ):
+            return MigrationResult()
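+        # support_profiles flipped: add the attribute to the profile kinds, or mark it deleted on them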
+        profiles_queries: list[type[AttributeMigrationQuery]] = []
+        if self.new_attribute_schema.support_profiles:
+            profiles_queries.append(ProfilesAttributeAddMigrationQuery)
+        else:
+            profiles_queries.append(ProfilesAttributeRemoveMigrationQuery)
+
+        return await super().execute(db=db, branch=branch, at=at, queries=profiles_queries)
diff --git a/backend/infrahub/core/migrations/schema/node_attribute_add.py b/backend/infrahub/core/migrations/schema/node_attribute_add.py
index 3052b3fe19..e3f3d99189 100644
--- a/backend/infrahub/core/migrations/schema/node_attribute_add.py
+++ b/backend/infrahub/core/migrations/schema/node_attribute_add.py
@@ -4,15 +4,19 @@
 
 from infrahub.core import registry
 from infrahub.core.node import Node
+from infrahub.core.schema.generic_schema import GenericSchema
+from infrahub.core.schema.node_schema import NodeSchema
 from infrahub.exceptions import PoolExhaustedError
 from infrahub.tasks.registry import update_branch_registry
 
-from ..query import AttributeMigrationQuery
+from ..query import AttributeMigrationQuery, MigrationBaseQuery
 from ..query.attribute_add import AttributeAddQuery
 from ..shared import AttributeSchemaMigration, MigrationResult
 
 if TYPE_CHECKING:
     from infrahub.core.node.resource_manager.number_pool import CoreNumberPool
+    from infrahub.core.schema import MainSchemaTypes
+    from infrahub.core.schema.attribute_schema import AttributeSchema
     from infrahub.database import InfrahubDatabase
 
     from ...branch import Branch
@@ -22,14 +26,27 @@ class NodeAttributeAddMigrationQuery01(AttributeMigrationQuery, AttributeAddQuery):
     name = "migration_node_attribute_add_01"
 
+    def _get_node_kinds(self, schema: MainSchemaTypes, new_attribute_schema: AttributeSchema) -> list[str]:
+        schema_kinds = [schema.kind]
+        if not isinstance(schema, (NodeSchema, GenericSchema)):
+            return schema_kinds
+        if new_attribute_schema.support_profiles:
+            schema_kinds.append(f"Profile{schema.kind}")
+            if isinstance(schema, GenericSchema) and schema.used_by:
+                schema_kinds.extend([f"Profile{kind}" for kind in schema.used_by])
+        return schema_kinds
+
     def __init__(
         self,
         migration: AttributeSchemaMigration,
         **kwargs: Any,
     ):
+        node_kinds = self._get_node_kinds(
+            schema=migration.new_schema, new_attribute_schema=migration.new_attribute_schema
+        )
         super().__init__(
             migration=migration,
-            node_kind=migration.new_schema.kind,
+            node_kinds=node_kinds,
             attribute_name=migration.new_attribute_schema.name,
             attribute_kind=migration.new_attribute_schema.kind,
             branch_support=migration.new_attribute_schema.get_branch().value,
@@ -42,6 +59,17 @@ class NodeAttributeAddMigration(AttributeSchemaMigration):
     name: str = "node.attribute.add"
     queries: Sequence[type[AttributeMigrationQuery]] = [NodeAttributeAddMigrationQuery01]  # type: ignore[assignment]
 
+    async def execute(
+        self,
+        db: InfrahubDatabase,
+        branch: Branch,
+        at: Timestamp | str | None = None,
+        queries: Sequence[type[MigrationBaseQuery]] | None = None,
+    ) -> MigrationResult:
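+        # an inherited attribute is migrated through its generic, whose query already covers the inheriting kinds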
+        if self.new_attribute_schema.inherited is True:
+            return MigrationResult()
+        return await super().execute(db=db, branch=branch, at=at, queries=queries)
+
     async def execute_post_queries(
         self,
         db: InfrahubDatabase,
diff --git a/backend/infrahub/core/migrations/schema/node_attribute_remove.py b/backend/infrahub/core/migrations/schema/node_attribute_remove.py
index d0b57e6852..aa5524151d 100644
--- a/backend/infrahub/core/migrations/schema/node_attribute_remove.py
+++ b/backend/infrahub/core/migrations/schema/node_attribute_remove.py
@@ -1,123 +1,27 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
-
-from infrahub.core.constants import RelationshipStatus
-from infrahub.core.graph.schema import GraphAttributeRelationships
-from infrahub.core.schema.generic_schema import GenericSchema
+from typing import Any, Sequence
 
 from ..query import AttributeMigrationQuery
+from ..query.attribute_remove import AttributeRemoveQuery
 from ..shared import AttributeSchemaMigration
 
-if TYPE_CHECKING:
-    from pydantic.fields import FieldInfo
-
-    from infrahub.database import InfrahubDatabase
-
 
-class NodeAttributeRemoveMigrationQuery01(AttributeMigrationQuery):
+class NodeAttributeRemoveMigrationQuery01(AttributeMigrationQuery, AttributeRemoveQuery):
     name = "migration_node_attribute_remove_01"
     insert_return: bool = False
 
-    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
-        branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string())
-        self.params.update(branch_params)
-
-        attr_name = self.migration.schema_path.field_name
-        kinds_to_ignore = []
-        if isinstance(self.migration.new_node_schema, GenericSchema) and attr_name is not None:
-            for inheriting_schema_kind in self.migration.new_node_schema.used_by:
-                node_schema = db.schema.get_node_schema(
-                    name=inheriting_schema_kind, branch=self.branch, duplicate=False
-                )
-                attr_schema = node_schema.get_attribute_or_none(name=attr_name)
-                if attr_schema and not attr_schema.inherited:
-                    kinds_to_ignore.append(inheriting_schema_kind)
-
-        self.params["node_kind"] = self.migration.new_schema.kind
-        self.params["kinds_to_ignore"] = kinds_to_ignore
-        self.params["attr_name"] = attr_name
-        self.params["current_time"] = self.at.to_string()
-        self.params["branch_name"] = self.branch.name
-        self.params["branch_support"] = self.migration.previous_attribute_schema.get_branch().value
-
-        self.params["rel_props"] = {
-            "branch": self.branch.name,
-            "branch_level": self.branch.hierarchy_level,
-            "status": RelationshipStatus.DELETED.value,
-            "from": self.at.to_string(),
-        }
-
-        def render_sub_query_per_rel_type(rel_type: str, rel_def: FieldInfo) -> str:
-            subquery = [
-                "WITH peer_node, rb, active_attr",
-                f'WHERE type(rb) = "{rel_type}"',
-            ]
-            if rel_def.default.direction.value == "outbound":
-                subquery.append(f"CREATE (active_attr)-[:{rel_type} $rel_props ]->(peer_node)")
-            elif rel_def.default.direction.value == "inbound":
-                subquery.append(f"CREATE (active_attr)<-[:{rel_type} $rel_props ]-(peer_node)")
-            else:
-                subquery.append(f"CREATE (active_attr)-[:{rel_type} $rel_props ]-(peer_node)")
-
-            subquery.append("RETURN peer_node as p2")
-            return "\n".join(subquery)
-
-        sub_queries = [
-            render_sub_query_per_rel_type(rel_type, rel_def)
-            for rel_type, rel_def in GraphAttributeRelationships.model_fields.items()
-        ]
-        sub_query_all = "\nUNION\n".join(sub_queries)
-
-        query = """
-    // Find all the active nodes
-    MATCH (node:%(node_kind)s)
-    WHERE (size($kinds_to_ignore) = 0 OR NOT any(l IN labels(node) WHERE l IN $kinds_to_ignore))
-    AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name }))
-    CALL (node) {
-        MATCH (root:Root)<-[r:IS_PART_OF]-(node)
-        WHERE %(branch_filter)s
-        RETURN node as n1, r as r1
-        ORDER BY r.branch_level DESC, r.from DESC
-        LIMIT 1
-    }
-    WITH n1 as active_node, r1 as rb
-    WHERE rb.status = "active"
-    // Find all the attributes that need to be updated
-    CALL (active_node) {
-        MATCH (active_node)-[r:HAS_ATTRIBUTE]-(attr:Attribute { name: $attr_name })
-        WHERE %(branch_filter)s
-        RETURN active_node as n1, r as r1, attr as attr1
-        ORDER BY r.branch_level DESC, r.from DESC
-        LIMIT 1
-    }
-    WITH n1 as active_node, r1 as rb, attr1 as active_attr
-    WHERE rb.status = "active"
-    WITH active_attr
-    MATCH (active_attr)-[]-(peer)
-    CALL (active_attr, peer) {
-        MATCH (active_attr)-[r]-(peer)
-        WHERE %(branch_filter)s
-        RETURN active_attr as a1, r as r1, peer as p1
-        ORDER BY r.branch_level DESC, r.from DESC
-        LIMIT 1
-    }
-    WITH a1 as active_attr, r1 as rb, p1 as peer_node
-    WHERE rb.status = "active"
-    CALL (peer_node, rb, active_attr) {
-        %(sub_query_all)s
-    }
-    WITH p2 as peer_node, rb, active_attr
-    FOREACH (i in CASE WHEN rb.branch = $branch_name THEN [1] ELSE [] END |
-        SET rb.to = $current_time
+    def __init__(
+        self,
+        migration: AttributeSchemaMigration,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            migration=migration,
+            attribute_name=migration.previous_attribute_schema.name,
+            node_kinds=[migration.new_schema.kind],
+            **kwargs,
         )
-    RETURN DISTINCT active_attr
-    """ % {
-        "branch_filter": branch_filter,
-        "sub_query_all": sub_query_all,
-        "node_kind": self.migration.new_schema.kind,
-    }
-        self.add_to_query(query)
 
 
 class NodeAttributeRemoveMigration(AttributeSchemaMigration):
diff --git a/backend/infrahub/core/migrations/schema/node_kind_update.py b/backend/infrahub/core/migrations/schema/node_kind_update.py
index 1df36ec90f..5c0db1b789 100644
--- a/backend/infrahub/core/migrations/schema/node_kind_update.py
+++ b/backend/infrahub/core/migrations/schema/node_kind_update.py
@@ -2,8 +2,9 @@
 
 from typing import Any, Sequence
 
+from ..query import MigrationQuery
 from ..query.node_duplicate import NodeDuplicateQuery, SchemaNodeInfo
-from ..shared import MigrationQuery, SchemaMigration
+from ..shared import SchemaMigration
 
 
 class NodeKindUpdateMigrationQuery01(MigrationQuery, NodeDuplicateQuery):
diff --git a/backend/infrahub/core/migrations/schema/node_remove.py b/backend/infrahub/core/migrations/schema/node_remove.py
index 98329235fa..6b784e3fcd 100644
--- a/backend/infrahub/core/migrations/schema/node_remove.py
+++ b/backend/infrahub/core/migrations/schema/node_remove.py
@@ -5,7 +5,8 @@
 from infrahub.core.constants import RelationshipStatus
 from infrahub.core.graph.schema import GraphNodeRelationships, GraphRelDirection
 
-from ..shared import MigrationQuery, SchemaMigration
+from ..query import MigrationQuery
+from ..shared import SchemaMigration
 
 if TYPE_CHECKING:
     from pydantic.fields import FieldInfo
diff --git a/backend/infrahub/core/migrations/schema/placeholder_dummy.py b/backend/infrahub/core/migrations/schema/placeholder_dummy.py
index 736a8d2972..aef98ed429 100644
--- a/backend/infrahub/core/migrations/schema/placeholder_dummy.py
+++ b/backend/infrahub/core/migrations/schema/placeholder_dummy.py
@@ -4,9 +4,10 @@
 
 from pydantic import Field
 
-from ..shared import MigrationQuery, SchemaMigration
+from ..query import MigrationBaseQuery  # noqa: TC001
+from ..shared import SchemaMigration
 
 
 class PlaceholderDummyMigration(SchemaMigration):
     name: str = "dummy.placeholder"
-    queries: Sequence[type[MigrationQuery]] = Field(default_factory=list)
+    queries: Sequence[type[MigrationBaseQuery]] = Field(default_factory=list)
diff --git a/backend/infrahub/core/migrations/shared.py b/backend/infrahub/core/migrations/shared.py
index a5df7af66f..bb42a13e83 100644
--- a/backend/infrahub/core/migrations/shared.py
+++ b/backend/infrahub/core/migrations/shared.py
@@ -17,7 +17,7 @@
 )
 from infrahub.core.timestamp import Timestamp
 
-from .query import MigrationQuery  # noqa: TC001
+from .query import MigrationBaseQuery  # noqa: TC001
 
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
@@ -40,7 +40,9 @@ def success(self) -> bool:
 class SchemaMigration(BaseModel):
     model_config = ConfigDict(arbitrary_types_allowed=True)
     name: str = Field(..., description="Name of the migration")
-    queries: Sequence[type[MigrationQuery]] = Field(..., description="List of queries to execute for this migration")
+    queries: Sequence[type[MigrationBaseQuery]] = Field(
+        ..., description="List of queries to execute for this migration"
+    )
 
     new_node_schema: MainSchemaTypes | None = None
     previous_node_schema: MainSchemaTypes | None = None
@@ -65,9 +67,14 @@
         return result
 
     async def execute_queries(
-        self, db: InfrahubDatabase, result: MigrationResult, branch: Branch, at: Timestamp
+        self,
+        db: InfrahubDatabase,
+        result: MigrationResult,
+        branch: Branch,
+        at: Timestamp,
+        queries: Sequence[type[MigrationBaseQuery]],
     ) -> MigrationResult:
-        for migration_query in self.queries:
+        for migration_query in queries:
             try:
                 query = await migration_query.init(db=db, branch=branch, at=at, migration=self)
                 await query.execute(db=db)
@@ -78,13 +85,20 @@
 
         return result
 
-    async def execute(self, db: InfrahubDatabase, branch: Branch, at: Timestamp | str | None = None) -> MigrationResult:
+    async def execute(
+        self,
+        db: InfrahubDatabase,
+        branch: Branch,
+        at: Timestamp | str | None = None,
+        queries: Sequence[type[MigrationBaseQuery]] | None = None,
+    ) -> MigrationResult:
         async with db.start_transaction() as ts:
             result = MigrationResult()
             at = Timestamp(at)
 
             await self.execute_pre_queries(db=ts, result=result, branch=branch, at=at)
-            await self.execute_queries(db=ts, result=result, branch=branch, at=at)
+            queries_to_execute = queries or self.queries
+            await self.execute_queries(db=ts, result=result, branch=branch, at=at, queries=queries_to_execute)
             await self.execute_post_queries(db=ts, result=result, branch=branch, at=at)
 
         return result
diff --git a/backend/infrahub/core/query/node.py b/backend/infrahub/core/query/node.py
index 3638ca27a6..2d6bcb212f 100644
--- a/backend/infrahub/core/query/node.py
+++ b/backend/infrahub/core/query/node.py
@@ -246,11 +246,15 @@ async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG
         ipnetwork_prop_list = [f"{key}: {value}" for key, value in ipnetwork_prop.items()]
 
         attrs_nonindexed_query = """
-        WITH distinct n
+        WITH DISTINCT n
         UNWIND $attrs AS attr
 
         // Try to find a matching vertex
-        OPTIONAL MATCH (existing_av:AttributeValue {value: attr.content.value, is_default: attr.content.is_default})
-        WHERE NOT existing_av:AttributeValueIndexed
+        CALL (attr) {
+            OPTIONAL MATCH (existing_av:AttributeValue {value: attr.content.value, is_default: attr.content.is_default})
+            WHERE NOT existing_av:AttributeValueIndexed
+            RETURN existing_av
+            LIMIT 1
+        }
         CALL (attr, existing_av) {
             // If none found, create a new one
             WITH existing_av
diff --git a/backend/infrahub/core/schema/attribute_schema.py b/backend/infrahub/core/schema/attribute_schema.py
index 01339ed2f2..729a975beb 100644
--- a/backend/infrahub/core/schema/attribute_schema.py
+++ b/backend/infrahub/core/schema/attribute_schema.py
@@ -68,6 +68,10 @@ def is_relationship(self) -> bool:
     def is_deprecated(self) -> bool:
         return bool(self.deprecation)
 
+    @property
+    def support_profiles(self) -> bool:
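+        """Profiles can only set attributes that are writable (not read-only) and optional."""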
+        return self.read_only is False and self.optional is True
+
     def get_id(self) -> str:
         if self.id is None:
             raise InitializationError("The attribute schema has not been saved yet and doesn't have an id")
diff --git a/backend/infrahub/core/schema/definitions/internal.py b/backend/infrahub/core/schema/definitions/internal.py
index 6ccb4fe925..bdd9437eea 100644
--- a/backend/infrahub/core/schema/definitions/internal.py
+++ b/backend/infrahub/core/schema/definitions/internal.py
@@ -568,7 +568,7 @@ def to_dict(self) -> dict[str, Any]:
             "Mainly relevant for internal object.",
             default_value=False,
             optional=True,
-            extra={"update": UpdateSupport.ALLOWED},
+            extra={"update": UpdateSupport.MIGRATION_REQUIRED},
         ),
         SchemaAttribute(
             name="unique",
@@ -585,7 +585,7 @@
             default_value=False,
             override_default_value=False,
             optional=True,
-            extra={"update": UpdateSupport.VALIDATE_CONSTRAINT},
+            extra={"update": UpdateSupport.MIGRATION_REQUIRED},
         ),
         SchemaAttribute(
             name="branch",
diff --git a/backend/infrahub/core/schema/generated/attribute_schema.py b/backend/infrahub/core/schema/generated/attribute_schema.py
index f2cfbdbfa6..f0a1a42a6f 100644
--- a/backend/infrahub/core/schema/generated/attribute_schema.py
+++ b/backend/infrahub/core/schema/generated/attribute_schema.py
@@ -78,7 +78,7 @@ class GeneratedAttributeSchema(HashableModel):
     read_only: bool = Field(
         default=False,
         description="Set the attribute as Read-Only, users won't be able to change its value. Mainly relevant for internal object.",
-        json_schema_extra={"update": "allowed"},
+        json_schema_extra={"update": "migration_required"},
     )
     unique: bool = Field(
         default=False,
@@ -88,7 +88,7 @@
     optional: bool = Field(
         default=False,
         description="Indicate if this attribute is mandatory or optional.",
-        json_schema_extra={"update": "validate_constraint"},
+        json_schema_extra={"update": "migration_required"},
     )
     branch: BranchSupportType | None = Field(
         default=None,
diff --git a/backend/infrahub/core/schema/schema_branch.py b/backend/infrahub/core/schema/schema_branch.py
index 7cf3299e68..35f514d870 100644
--- a/backend/infrahub/core/schema/schema_branch.py
+++ b/backend/infrahub/core/schema/schema_branch.py
@@ -328,14 +328,23 @@ def get(self, name: str, duplicate: bool = True) -> MainSchemaTypes:
         elif name in self.templates:
             key = self.templates[name]
 
-        if key and duplicate:
-            return self._cache[key].duplicate()
-        if key and not duplicate:
-            return self._cache[key]
+        if not key:
+            raise SchemaNotFoundError(
+                branch_name=self.name, identifier=name, message=f"Unable to find the schema {name!r} in the registry"
+            )
 
-        raise SchemaNotFoundError(
-            branch_name=self.name, identifier=name, message=f"Unable to find the schema {name!r} in the registry"
-        )
+        schema: MainSchemaTypes | None = None
+        try:
+            schema = self._cache[key]
+        except KeyError:
+            pass
+
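+        # the name maps point at a hash that is missing from the cache: the stored schema hash is stale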
+        if not schema:
+            raise ValueError(f"Schema {name!r} on branch {self.name} has incorrect hash: {key!r}")
+
+        if duplicate:
+            return schema.duplicate()
+        return schema
 
     def get_node(self, name: str, duplicate: bool = True) -> NodeSchema:
         """Access a specific NodeSchema, defined by its kind."""
@@ -565,6 +574,7 @@ def process_post_validation(self) -> None:
         self.process_dropdowns()
         self.process_relationships()
         self.process_human_friendly_id()
+        self.register_human_friendly_id()
 
     def _generate_identifier_string(self, node_kind: str, peer_kind: str) -> str:
         return "__".join(sorted([node_kind, peer_kind])).lower()
@@ -900,7 +910,6 @@ def _is_attr_combination_unique(
         return False
 
     def validate_human_friendly_id(self) -> None:
-        self.hfids = HFIDs()
         for name in self.generic_names_without_templates + self.node_names:
             node_schema = self.get(name=name, duplicate=False)
 
@@ -934,11 +943,6 @@ def validate_human_friendly_id(self) -> None:
                     rel_schemas_to_paths[rel_identifier] = (schema_path.related_schema, [])
                 rel_schemas_to_paths[rel_identifier][1].append(schema_path.attribute_path_as_str)
 
-            if node_schema.is_node_schema and node_schema.namespace not in ["Schema", "Internal"]:
-                self.hfids.register_hfid_schema_path(
-                    kind=node_schema.kind, schema_path=schema_path, hfid=node_schema.human_friendly_id
-                )
-
             if config.SETTINGS.main.schema_strict_mode:
                 # For every relationship referred within hfid, check whether the combination of attributes is unique is the peer schema node
                 for related_schema, attrs_paths in rel_schemas_to_paths.values():
@@ -1540,6 +1544,34 @@ def process_human_friendly_id(self) -> None:
                 node.uniqueness_constraints = [hfid_uniqueness_constraint]
                 self.set(name=node.kind, schema=node)
 
+    def register_human_friendly_id(self) -> None:
+        """Register HFID automations
+
+        Register the HFIDs after all processing and validation has been done.
+        """
+
+        self.hfids = HFIDs()
+        for name in self.generic_names_without_templates + self.node_names:
+            node_schema = self.get(name=name, duplicate=False)
+
+            if not node_schema.human_friendly_id:
+                continue
+
+            allowed_types = SchemaElementPathType.ATTR_WITH_PROP | SchemaElementPathType.REL_ONE_MANDATORY_ATTR
+
+            for hfid_path in node_schema.human_friendly_id:
+                schema_path = self.validate_schema_path(
+                    node_schema=node_schema,
+                    path=hfid_path,
+                    allowed_path_types=allowed_types,
+                    element_name="human_friendly_id",
+                )
+
+                if node_schema.is_node_schema and node_schema.namespace not in ["Schema", "Internal"]:
+                    self.hfids.register_hfid_schema_path(
+                        kind=node_schema.kind, schema_path=schema_path, hfid=node_schema.human_friendly_id
+                    )
+
     def process_hierarchy(self) -> None:
         for name in self.nodes.keys():
             node = self.get_node(name=name, duplicate=False)
@@ -2255,7 +2287,7 @@ def generate_profile_from_node(self, node: NodeSchema) -> ProfileSchema:
         )
 
         for node_attr in node.attributes:
-            if node_attr.read_only or node_attr.optional is False:
+            if not node_attr.support_profiles:
                 continue
             attr_schema_class = get_attribute_schema_class_for_kind(kind=node_attr.kind)
             attr = attr_schema_class(
diff --git a/backend/infrahub/display_labels/tasks.py b/backend/infrahub/display_labels/tasks.py
index 34981d870b..d4d2bd481c 100644
--- a/backend/infrahub/display_labels/tasks.py
+++ b/backend/infrahub/display_labels/tasks.py
@@ -152,7 +152,7 @@ async def display_labels_setup_jinja2(
 
 @flow(
     name="trigger-update-display-labels",
-    flow_run_name="Trigger updates for display labels for kind",
+    flow_run_name="Trigger updates for display labels for {kind}",
 )
 async def trigger_update_display_labels(
     branch_name: str,
diff --git a/backend/infrahub/hfid/tasks.py b/backend/infrahub/hfid/tasks.py
index 4575804d5c..e19b766f09 100644
--- a/backend/infrahub/hfid/tasks.py
+++ b/backend/infrahub/hfid/tasks.py
@@ -151,7 +151,7 @@ async def hfid_setup(context: InfrahubContext, branch_name: str | None = None, e
 
 @flow(
     name="trigger-update-hfid",
-    flow_run_name="Trigger updates for display labels for kind",
+    flow_run_name="Trigger updates for HFID for {kind}",
 )
 async def trigger_update_hfid(
     branch_name: str,
diff --git a/backend/tests/benchmark/test_graphql_query.py b/backend/tests/benchmark/test_graphql_query.py
index ba846d2a9d..d19f47652b 100644
--- a/backend/tests/benchmark/test_graphql_query.py
+++ b/backend/tests/benchmark/test_graphql_query.py
@@ -73,10 +73,8 @@ def test_query_one_model(exec_async, aio_benchmark, db: InfrahubDatabase, defaul
         }
     }
     """
-
-    gql_params = exec_async(
-        prepare_graphql_params, db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
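+    # the branch needs an up-to-date schema hash before the GraphQL params are prepared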
+    default_branch.update_schema_hash()
+    gql_params = exec_async(prepare_graphql_params, db=db, branch=default_branch)
 
     for _ in range(NBR_WARMUP):
         exec_async(
@@ -123,10 +121,8 @@ def test_query_rel_many(exec_async, aio_benchmark, db: InfrahubDatabase, default
         }
     }
     """
-
-    gql_params = exec_async(
-        prepare_graphql_params, db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = exec_async(prepare_graphql_params, db=db, branch=default_branch)
 
     for _ in range(NBR_WARMUP):
         exec_async(
@@ -174,9 +170,8 @@ def test_query_rel_one(exec_async, aio_benchmark, db: InfrahubDatabase, default_
         }
     """
 
-    gql_params = exec_async(
-        prepare_graphql_params, db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = exec_async(prepare_graphql_params, db=db, branch=default_branch)
 
     for _ in range(NBR_WARMUP):
         exec_async(
@@ -223,7 +218,7 @@ def test_query_rel_one(exec_async, aio_benchmark, db: InfrahubDatabase, default_
 # """
 
 #     gql_params = exec_async(
-#         prepare_graphql_params, db=db, include_mutation=False, include_subscription=False, branch=default_branch
+#         prepare_graphql_params, db=db, branch=default_branch
 #     )
 #     aio_benchmark(
 #         graphql,
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/.gitignore b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/.gitignore
new file mode 100644
index 0000000000..16e0ef9db7
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/.gitignore
@@ -0,0 +1,10 @@
+.vscode/*
+*.pyc
+*.tar.gz
+**/.DS_Store
+playbooks/documentation
+playbooks/intended
+
+# Direnv files (https://direnv.net/)
+.direnv/
+.envrc
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/.infrahub.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/.infrahub.yml
new file mode 100644
index 0000000000..50bd441c9a
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/.infrahub.yml
@@ -0,0 +1,97 @@
+---
+schemas:
+  - schemas/demo_edge_fabric.yml
+
+jinja2_transforms:
+  - name: "device_startup"
+    description: "Template to generate startup configuration for network devices"
+    query: "device_startup_info"
+    template_path: "templates/device_startup_config.tpl.j2"
+
+  - name: "clab_topology"
+    query: "topology_info"
+    template_path: "topology/topology.tpl.j2"
+
+artifact_definitions:
+  - name: "Openconfig Interface for Arista devices"
+    artifact_name: "openconfig-interfaces"
+    parameters:
+      device: "name__value"
+    content_type: "application/json"
+    targets: "arista_devices"
+    transformation: "OCInterfaces"
+
+  - name: "Startup Config for Edge devices"
+    artifact_name: "startup-config"
+    parameters:
+      device: "name__value"
+    content_type: "text/plain"
+    targets: "edge_router"
+    transformation: "device_startup"
+
+check_definitions:
+  - name: backbone_link_redundancy
+    class_name: InfrahubCheckBackboneLinkRedundancy
+    file_path: "checks/check_backbone_link_redundancy.py"
+
+python_transforms:
+  - name: OCInterfaces
+    class_name: OCInterfaces
+    file_path: "transforms/openconfig.py"
+  - name: oc_bgp_neighbors
+    class_name: OCBGPNeighbors
+    file_path: "transforms/openconfig.py"
+  - name: computed_circuit_description
+    class_name: ComputedCircuitDescription
+    file_path: "transforms/computed_circuit_description.py"
+
+generator_definitions:
+  - name: update_upstream_interfaces_description
+    file_path: "generators/upstream_interfaces.py"
+    targets: upstream_interfaces
+    query: upstream_interfaces
+    parameters:
+      id: "id"
+
+  - name: create_circuit_endpoints
+    file_path: "generators/circuit_endpoints.py"
+    targets: provisioning_circuits
+    query: circuit_endpoints
+    parameters:
+      circuit_id: "circuit_id__value"
+
+  - name: drained_circuit_bgp_sessions
+    file_path: "generators/drained_circuit_bgp_sessions.py"
+    targets: maintenance_circuits
+    query: drained_circuit_bgp_sessions
+    parameters:
+      circuit_id: "circuit_id__value"
+
+  - name: backbone_service
+    file_path: "generators/backbone_service.py"
+    targets: backbone_services
+    query: backbone_service
+    parameters:
+      name: "name__value"
+
+queries:
+  - name: topology_info
+    file_path: "topology/topology_info.gql"
+  - name: check_backbone_link_redundancy
+    file_path: "checks/check_backbone_link_redundancy.gql"
+  - name: oc_bgp_neighbors
+    file_path: "transforms/oc_bgp_neighbors.gql"
+  - name: oc_interfaces
+    file_path: "transforms/oc_interfaces.gql"
+  - name: device_startup_info
+    file_path: "templates/device_startup_info.gql"
+  - name: upstream_interfaces
+    file_path: "generators/upstream_interfaces.gql"
+  - name: drained_circuit_bgp_sessions
+    file_path: "generators/drained_circuit_bgp_sessions.gql"
+  - name: circuit_endpoints
+    file_path: "generators/circuit_endpoints.gql"
+  - name: backbone_service
+    file_path: "generators/backbone_service.gql"
+  - name: computed_circuit_description
+    file_path: "transforms/computed_circuit_description.gql"
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/checks/check_backbone_link_redundancy.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/checks/check_backbone_link_redundancy.gql
new file mode 100644
index 0000000000..9410db1b38
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/checks/check_backbone_link_redundancy.gql
@@ -0,0 +1,41 @@
+query check_backbone_link_redundancy {
+  InfraCircuit(role__value: "backbone") {
+    edges {
+      node {
+        id
+        circuit_id {
+          value
+        }
+        vendor_id {
+          value
+        }
+        status {
+          value
+        }
+        endpoints {
+          edges {
+            node {
+              site {
+                node {
+                  id
+                  name {
+                    value
+                  }
+                }
+              }
+              connected_endpoint {
+                node {
+                  ... on InfraInterface {
+                    enabled {
+                      value
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/checks/check_backbone_link_redundancy.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/checks/check_backbone_link_redundancy.py
new file mode 100644
index 0000000000..d41cc5250f
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/checks/check_backbone_link_redundancy.py
@@ -0,0 +1,44 @@
+from collections import defaultdict
+
+from infrahub_sdk.checks import InfrahubCheck
+
+
+class InfrahubCheckBackboneLinkRedundancy(InfrahubCheck):
+    query = "check_backbone_link_redundancy"
+
+    def validate(self, data):
+        site_id_by_name = {}
+
+        backbone_links_per_site = defaultdict(lambda: defaultdict(int))
+
+        if data["InfraCircuit"]["edges"]:  # noqa: PLR1702
+            circuits = data["InfraCircuit"]["edges"]
+
+            for circuit in circuits:
+                circuit_node = circuit["node"]
+                circuit_status = circuit_node["status"]["value"]
+
+                if circuit_node["endpoints"]["edges"]:
+                    endpoints = circuit_node["endpoints"]["edges"]
+
+                    for endpoint in endpoints:
+                        endpoint_node = endpoint["node"]
+                        site_name = endpoint_node["site"]["node"]["name"]["value"]
+
+                        site_node = endpoint_node["site"]["node"]
+                        site_id_by_name[site_name] = site_node["id"]
+                        backbone_links_per_site[site_name]["total"] += 1
+
+                        if endpoint_node["connected_endpoint"]:
+                            connected_endpoint_node = endpoint_node["connected_endpoint"]["node"]
+                            if connected_endpoint_node:
+                                if connected_endpoint_node["enabled"]["value"] and circuit_status == "active":
+                                    backbone_links_per_site[site_name]["operational"] += 1
+
+        for site_name, site in backbone_links_per_site.items():
+            if site.get("operational", 0) / site["total"] < 0.6:
+                self.log_error(
+                    message=f"{site_name} has less than 60% of backbone circuits operational ({site.get('operational', 0)}/{site['total']})",
+                    object_id=site_id_by_name[site_name],
+                    object_type="site",
+                )
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/backbone_service.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/backbone_service.gql
new file mode 100644
index 0000000000..1f9e467e67
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/backbone_service.gql
@@ -0,0 +1,33 @@
+query backbone_service($name: String!) {
+  InfraBackBoneService(name__value: $name) {
+    edges {
+      node {
+        id
+        name {
+          value
+        }
+        circuit_id {
+          value
+        }
+        internal_circuit_id {
+          value
+        }
+        site_a {
+          node {
+            id
+          }
+        }
+        site_b {
+          node {
+            id
+          }
+        }
+        provider {
+          node {
+            id
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/backbone_service.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/backbone_service.py
new file mode 100644
index 0000000000..25c8e14cb8
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/backbone_service.py
@@ -0,0 +1,138 @@
+import logging
+
+from infrahub_sdk.generator import InfrahubGenerator
+
+
+async def find_interface(client, site_id):
+    # Retrieve all 'edge' routers from the site
+    devices = await client.filters(kind="InfraDevice", role__value="edge", site__ids=[site_id])
+
+    if len(devices) == 0:
+        raise ValueError("Couldn't find devices")
+
+    # Take the first one of the list
+    device = devices[0]
+
+    # Retrieve all L3 interfaces from this Device with 'backbone' role
+    interfaces = await client.filters(
+        kind="InfraInterfaceL3",
+        device__ids=[device.id],
+        role__value="backbone",
+        include=["connected_endpoint", "ip_addresses", "device"],
+        prefetch_relationships=True,
+        populate_store=True,
+    )
+
+    if len(interfaces) == 0:
+        raise ValueError("Couldn't find interfaces")
+
+    # Return the first one of the list
+    return interfaces[0]
+
+
+class Generator(InfrahubGenerator):
+    async def generate(self, data: dict) -> None:
+        log = logging.getLogger("infrahub.tasks")
+        service_id = data["InfraBackBoneService"]["edges"][0]["node"]["id"]
+        service_name = data["InfraBackBoneService"]["edges"][0]["node"]["name"]["value"]
+        circuit_id = data["InfraBackBoneService"]["edges"][0]["node"]["circuit_id"]["value"]
+        internal_circuit_id = data["InfraBackBoneService"]["edges"][0]["node"]["internal_circuit_id"]["value"]
+        site_a_id = data["InfraBackBoneService"]["edges"][0]["node"]["site_a"]["node"]["id"]
+        site_b_id = data["InfraBackBoneService"]["edges"][0]["node"]["site_b"]["node"]["id"]
+        provider_id = data["InfraBackBoneService"]["edges"][0]["node"]["provider"]["node"]["id"]
+
+        # Create Circuit
+        log.info("Create Circuit")
+        circuit = await self.client.create(
+            kind="InfraCircuit",
+            provider={"id": provider_id},
+            vendor_id=circuit_id,
+            circuit_id=internal_circuit_id,
+            status="active",
+            role="backbone",
+        )
+
+        await circuit.save(allow_upsert=True)
+
+        # Retrieve one interface per Site to be used for Circuit Endpoints
+        log.info("Retrieve one interface per Site to be used for Circuit Endpoints")
+        interface_a = await find_interface(self.client, site_a_id)
+        interface_b = await find_interface(self.client, site_b_id)
+
+        # Assign the 2 Interfaces as Circuit Endpoints
+        log.info("Assign the 2 Interfaces as Circuit Endpoints")
+        if not interface_a.connected_endpoint.initialized:
+            connected_endpoint_a = await self.client.create(
+                kind="InfraCircuitEndpoint", circuit=circuit, site=site_a_id, connected_endpoint=interface_a
+            )
+            await connected_endpoint_a.save(allow_upsert=True)
+        else:
+            await interface_a.connected_endpoint.fetch()
+
+            if (
+                not interface_a.connected_endpoint.typename == "InfraCircuitEndpoint"
+                or not interface_a.connected_endpoint.peer.circuit.id == circuit.id
+            ):
+                raise ValueError(
+                    f"{interface_a.name.value} on {interface_a.device.peer.name.value} is already connected!"
+
+
+class Generator(InfrahubGenerator):
+    async def generate(self, data: dict) -> None:
+        log = logging.getLogger("infrahub.tasks")
+        service_id = data["InfraBackBoneService"]["edges"][0]["node"]["id"]
+        service_name = data["InfraBackBoneService"]["edges"][0]["node"]["name"]["value"]
+        circuit_id = data["InfraBackBoneService"]["edges"][0]["node"]["circuit_id"]["value"]
+        internal_circuit_id = data["InfraBackBoneService"]["edges"][0]["node"]["internal_circuit_id"]["value"]
+        site_a_id = data["InfraBackBoneService"]["edges"][0]["node"]["site_a"]["node"]["id"]
+        site_b_id = data["InfraBackBoneService"]["edges"][0]["node"]["site_b"]["node"]["id"]
+        provider_id = data["InfraBackBoneService"]["edges"][0]["node"]["provider"]["node"]["id"]
+
+        # Create Circuit
+        log.info("Create Circuit")
+        circuit = await self.client.create(
+            kind="InfraCircuit",
+            provider={"id": provider_id},
+            vendor_id=circuit_id,
+            circuit_id=internal_circuit_id,
+            status="active",
+            role="backbone",
+        )
+
+        await circuit.save(allow_upsert=True)
+
+        # Retrieve one interface per Site to be used for Circuit Endpoints
+        log.info("Retrieve one interface per Site to be used for Circuit Endpoints")
+        interface_a = await find_interface(self.client, site_a_id)
+        interface_b = await find_interface(self.client, site_b_id)
+
+        # Assign the 2 Interfaces as Circuit Endpoints
+        log.info("Assign the 2 Interfaces as Circuit Endpoints")
+        if not interface_a.connected_endpoint.initialized:
+            connected_endpoint_a = await self.client.create(
+                kind="InfraCircuitEndpoint", circuit=circuit, site=site_a_id, connected_endpoint=interface_a
+            )
+            await connected_endpoint_a.save(allow_upsert=True)
+        else:
+            await interface_a.connected_endpoint.fetch()
+
+            if (
+                not interface_a.connected_endpoint.typename == "InfraCircuitEndpoint"
+                or not interface_a.connected_endpoint.peer.circuit.id == circuit.id
+            ):
+                raise ValueError(
+                    f"{interface_a.name.value} on {interface_a.device.peer.name.value} is already connected!"
+                )
+
+        if not interface_b.connected_endpoint.initialized:
+            connected_endpoint_b = await self.client.create(
+                kind="InfraCircuitEndpoint", circuit=circuit, site=site_b_id, connected_endpoint=interface_b
+            )
+            await connected_endpoint_b.save(allow_upsert=True)
+        else:
+            await interface_b.connected_endpoint.fetch()
+
+            if (
+                not interface_b.connected_endpoint.typename == "InfraCircuitEndpoint"
+                or not interface_b.connected_endpoint.peer.circuit.id == circuit.id
+            ):
+                raise ValueError(
+                    f"{interface_b.name.value} on {interface_b.device.peer.name.value} is already connected!"
+                )
+
+        # Retrieve Pool for interconnection subnets
+        log.info("Retrieve Pool for interconnection subnets")
+        internal_networks_pool = await self.client.get(kind="CoreIPPrefixPool", name__value="Internal networks pool")
+
+        # Allocate the next free IP prefix for the service
+        log.info("Allocate the next free IP prefix for the service")
+        prefix = await self.client.allocate_next_ip_prefix(
+            resource_pool=internal_networks_pool,
+            prefix_length=31,
+            member_type="address",
+            data={"is_pool": True},
+            identifier=f"{service_name}-{service_id}",
+        )
+        await prefix.save(allow_upsert=True)
+
+        # Create a new Address Pool for this prefix
+        log.info("Create a new Address Pool for this prefix")
+        circuit_address_pool = await self.client.create(
+            kind="CoreIPAddressPool",
+            name=f"{service_name}-{service_id}",
+            default_address_type="IpamIPAddress",
+            default_prefix_size=31,
+            resources=[prefix],
+            is_pool=True,
+            ip_namespace={"id": "default"},
+        )
+        await circuit_address_pool.save(allow_upsert=True)
+
+        # Use the new pool to allocate 2 IPs on the interfaces
+        log.info("Use the new pool to allocate 2 IPs on the interfaces")
+        interface_a_ip = await self.client.allocate_next_ip_address(
+            resource_pool=circuit_address_pool,
+        )
+        interface_a.ip_addresses.add(interface_a_ip)
+        await interface_a.save(allow_upsert=True)
+
+        interface_b_ip = await self.client.allocate_next_ip_address(
+            resource_pool=circuit_address_pool,
+        )
+        interface_b.ip_addresses.add(interface_b_ip)
+        await interface_b.save(allow_upsert=True)
+
+        log.info("Execution finished successfully")
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/circuit_endpoints.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/circuit_endpoints.gql
new file mode 100644
index 0000000000..df0d87a44d
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/circuit_endpoints.gql
@@ -0,0 +1,23 @@
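+# Query used by generators/circuit_endpoints.py; the endpoints count lets the generator skip circuits that already have endpoints.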
+query circuit_endpoints($circuit_id: String!) {
+  InfraCircuit(circuit_id__value: $circuit_id) {
+    edges {
+      node @expand {
+        __typename
+        id
+        provider {
+          node {
+            __typename
+            id
+            name { value }
+          }
+        }
+        circuit_id { value }
+        vendor_id { value }
+        endpoints {
+          count
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/circuit_endpoints.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/circuit_endpoints.py
new file mode 100644
index 0000000000..749552c374
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/circuit_endpoints.py
@@ -0,0 +1,37 @@
+from infrahub_sdk.generator import InfrahubGenerator
+
+# In this Generator: We want to create 2 InfraCircuitEndpoints (A & Z)
+# If the InfraCircuit doesn't have any CircuitEndpoints
+
+
+class Generator(InfrahubGenerator):
+    async def generate(self, data: dict) -> None:
+        # Iterate over all the circuits in the 'InfraCircuit' edges array
+        circuits = data["InfraCircuit"]["edges"]
+
+        for circuit in circuits:
+            # There are already endpoints, no need to add more :)
+            if circuit["node"]["endpoints"]["count"] != 0:
+                continue
+
+            # Set local variables for easier manipulation
+            id = circuit["node"]["id"]
+            provider = circuit["node"]["provider"]["node"]["name"]["value"]
+            circuit_id: str = circuit["node"]["circuit_id"]["value"]
+            vendor_id: str = circuit["node"]["vendor_id"]["value"]
+
+            # Manage description
+            description: str = f"{circuit_id} - ({provider.upper()}{' x ' + vendor_id.upper() if vendor_id else ''})"
+
+            for i in range(1, 3):
+                if i == 1:
+                    side_description = description + " - A Side"
+                elif i == 2:
+                    side_description = description + " - Z Side"
+                data = {
+                    "circuit": {"id": id},
+                    "description": {"value": side_description},
+                }
+
+                obj = await self.client.create(kind="InfraCircuitEndpoint", data=data)
+                await obj.save(allow_upsert=True)
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/drained_circuit_bgp_sessions.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/drained_circuit_bgp_sessions.gql
new file mode 100644
index 0000000000..cc1f277ead
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/drained_circuit_bgp_sessions.gql
@@ -0,0 +1,38 @@
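+# Query used by generators/drained_circuit_bgp_sessions.py; status and bgp_sessions drive the maintenance handling below.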
+query drained_circuit_bgp_sessions($circuit_id: String!) {
+  InfraCircuit(circuit_id__value: $circuit_id) {
+    edges {
+      node @expand {
+        __typename
+        id
+        provider {
+          node {
+            __typename
+            id
+          }
+        }
+        status {
+          value
+        }
+        endpoints {
+          count
+          edges {
+            node {
+              __typename
+              id
+            }
+          }
+        }
+        bgp_sessions {
+          count
+          edges {
+            node {
+              __typename
+              id
+            }
+          }
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/drained_circuit_bgp_sessions.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/drained_circuit_bgp_sessions.py
new file mode 100644
index 0000000000..ca8819ecb9
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/drained_circuit_bgp_sessions.py
@@ -0,0 +1,27 @@
+from infrahub_sdk.generator import InfrahubGenerator
+
+# In this Generator: We want to drain the BGP Sessions linked to an InfraCircuit
+# If the InfraCircuit Status is maintenance:
+# - We are changing the status of the BGP Sessions to maintenance
+
+
+class Generator(InfrahubGenerator):
+    async def generate(self, data: dict) -> None:
+        # Iterate over all the circuits in the 'InfraCircuit' edges array
+        circuits = data["InfraCircuit"]["edges"]
+
+        for circuit in circuits:
+            id = circuit["node"]["id"]  # noqa: F841
+            status: str = circuit["node"]["status"]["value"]
+
+            if status != "maintenance":
+                continue  # No need to change the status of the BGP Sessions
+
+            if circuit["node"]["bgp_sessions"]["count"] == 0:
+                continue  # There are no BGP sessions associated with this circuit
+
+            bgp_sessions = circuit["node"]["bgp_sessions"]["edges"]
+            for bgp_session in bgp_sessions:
+                obj = await self.client.get(kind=bgp_session["node"]["__typename"], id=bgp_session["node"]["id"])
+                obj.status.value = "maintenance"
+                await obj.save(allow_upsert=True)
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/upstream_interfaces.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/upstream_interfaces.gql
new file mode 100644
index 0000000000..10a2315a51
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/upstream_interfaces.gql
@@ -0,0 +1,42 @@
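+# Query used by generators/upstream_interfaces.py; the @expand directive is expected to also pull in attributes the generator reads (e.g. role, speed).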
+query upstream_interfaces($id: ID!) {
+  InfraInterfaceL3(ids: [$id]) {
+    edges {
+      node @expand {
+        device {
+          node {
+            __typename
+            id
+            name {
+              value
+            }
+          }
+        }
+        status { value }
+        connected_endpoint {
+          node {
+            __typename
+            id
+            ... on InfraCircuitEndpoint {
+              __typename
+              circuit {
+                node {
+                  vendor_id {
+                    value
+                  }
+                  provider {
+                    node {
+                      __typename
+                      id
+                      name { value }
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/upstream_interfaces.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/upstream_interfaces.py
new file mode 100644
index 0000000000..e470890681
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/generators/upstream_interfaces.py
@@ -0,0 +1,39 @@
+from infrahub_sdk.generator import InfrahubGenerator
+
+# In this Generator: We are forcing the InterfaceL3 description
+# If there is an InfraCircuit connected to it
+# If the InterfaceL3 status is provisioning
+
+
+class Generator(InfrahubGenerator):
+    async def generate(self, data: dict) -> None:
+        # Extract the first node in the 'InfraInterfaceL3' edges array
+        upstream_interface = data["InfraInterfaceL3"]["edges"][0]["node"]
+
+        # Set local variables for easier manipulation
+        provider = None
+        vendor_id = None
+        role: str = upstream_interface["role"]["value"]
+        status: str = upstream_interface["status"]["value"]
+        speed: float = upstream_interface["speed"]["value"] / 1000
+
+        if status != "provisioning":
+            return  # We enforce it only on new interfaces to avoid "noise"
+
+        # Check and extract data from the connected endpoint
+        if "connected_endpoint" in upstream_interface and "node" in upstream_interface["connected_endpoint"]:
+            connected_endpoint = upstream_interface["connected_endpoint"]["node"]
+            if "circuit" in connected_endpoint and "node" in connected_endpoint["circuit"]:
+                circuit = connected_endpoint["circuit"]["node"]
+                if "provider" in circuit and "node" in circuit["provider"]:
+                    provider = circuit["provider"]["node"]["name"]["value"]
+                if "vendor_id" in circuit:
+                    vendor_id = circuit["vendor_id"]["value"]
+
+        # Update the object description if provider and vendor_id are available
+        if provider and vendor_id:
+            new_description = f"{role.upper()}: ({provider.upper()}x{vendor_id.upper()}) [{speed}Gbps]"
+            # Retrieve the object based on type and ID, then update its description
+            obj = await self.client.get(kind=upstream_interface["__typename"], id=upstream_interface["id"])
+            obj.description.value = new_description
+            await obj.save(allow_upsert=True)
diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/playbooks/avd-config.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/playbooks/avd-config.yml
new file mode 100644
index 0000000000..b51d19511a
--- /dev/null
+++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/playbooks/avd-config.yml
@@ -0,0 +1,31 @@
+---
+- name: Query Interfaces information with the query_graphql module
+  hosts: platform_eos
+  gather_facts: false
+  vars:
+    ansible_connection: httpapi
+    ansible_network_os: eos
+    ansible_httpapi_use_ssl: true
+    ansible_httpapi_validate_certs: false
+    ansible_become: true
+    ansible_become_method: enable
+    ansible_user: admin
+    ansible_password: admin
+
+  tasks:
+    - name: Query Artifact
+      opsmill.infrahub.artifact_fetch:
+        artifact_name: "Config variables for Arista AVD"
+        target_id: "{{ id }}"
+        api_endpoint: "http://localhost:8000"
+        token: 06438eb2-8019-4776-878c-0941b1f1d1ec
+      register: artifact
+
+    - name: Set artifacts as facts
+      set_fact:
+        {"{{ item.key }}": "{{ item.value }}"}
+      loop: "{{ artifact.json |
dict2items }}" + + - name: Generate configs + import_role: + name: arista.avd.eos_cli_config_gen diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/playbooks/inventory.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/playbooks/inventory.yml new file mode 100644 index 0000000000..85026ca5b0 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/playbooks/inventory.yml @@ -0,0 +1,34 @@ +--- +plugin: opsmill.infrahub.inventory +api_endpoint: "http://localhost:8000" +token: 06438eb2-8019-4776-878c-0941b1f1d1ec +validate_certs: false + +# strict: True + +branch: "main" + +nodes: + InfraDevice: + include: + - name + - platform.ansible_network_os + - primary_address.address + - site.name + - role + - asn.asn + +compose: + hostname: name + platform: platform.ansible_network_os + ansible_host: primary_address.address | ansible.utils.ipaddr('address') + site: site.name + asn: asn.asn + +keyed_groups: + - prefix: site + key: site + - prefix: role + key: role.name + - prefix: platform + key: platform diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/schemas/demo_edge_fabric.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/schemas/demo_edge_fabric.yml new file mode 100644 index 0000000000..9e6bea6fdb --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/schemas/demo_edge_fabric.yml @@ -0,0 +1,27 @@ +# yaml-language-server: $schema=https://schema.infrahub.app/develop/schema.schema.json +--- +version: "1.0" + +nodes: + - name: EdgeFabric + namespace: Demo + description: "." + label: "EdgeFabric" + default_filter: name__value + display_labels: + - name__value + attributes: + - name: name + kind: Text + # unique: true + - name: description + kind: Text + optional: true + - name: nbr_racks + kind: Number + relationships: + - name: tags + peer: BuiltinTag + optional: true + cardinality: many + kind: Attribute diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/templates/device_startup_config.tpl.j2 b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/templates/device_startup_config.tpl.j2 new file mode 100644 index 0000000000..21fc153e23 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/templates/device_startup_config.tpl.j2 @@ -0,0 +1,113 @@ +{% set ns = namespace(loopback_intf_name=none, loopback_ip=none, management_intf_name=none, management_ip=none) %} +{% for intf in data.InfraDevice.edges[0].node.interfaces.edges %} +{% if intf.node.role.value == "loopback" %} +{% set ns.loopback_intf_name = intf.node.name.value %} +{% set ns.loopback_ip = intf.node.ip_addresses.edges[0].node.address.value.split('/')[0] %} +{% elif intf.node.role.value == "management" %} +{% set ns.management_intf_name = intf.node.name.value %} +{% set ns.management_ip = intf.node.ip_addresses.edges[0].node.address.value.split('/')[0] %} +{% endif %} +{% endfor %} +no aaa root +! +username admin privilege 15 role network-admin secret sha512 $6$q4ez.aZgB/G/eeWW$ukvRobb5RtYmUlCcY0atxhwPmA6FPoRjR3AxYFJqNFoCRgJjrohKGrBsbY12n1uRZeCer1L8oejx5aPlrf.op0 +! +transceiver qsfp default-mode 4x10G +! +service routing protocols model multi-agent +! +hostname {{ data.InfraDevice.edges[0].node.name.value }} +! +spanning-tree mode mstp +! +management api http-commands + no shutdown +! +management api gnmi + transport grpc default +! 
+management api netconf + transport ssh default +! +{% for intf in data.InfraDevice.edges[0].node.interfaces.edges %} +{% if intf.node.name.value != ns.management_intf_name and intf.node.name.value != ns.loopback_intf_name %} +interface {{ intf.node.name.value }} +{% if intf.node["description"]["value"] %} + description {{ intf.node["description"]["value"] }} +{% else %} + description role: {{ intf.node.role.value }} +{% endif %} +{% if "mtu" in intf.node and intf.node["mtu"]["value"] %} + mtu {{ intf.node["mtu"]["value"] }} +{% endif %} +{% if not intf.node["enabled"]["value"] %} + shutdown +{% endif %} +{% if intf.node["ip_addresses"] %} +{% for ip in intf.node["ip_addresses"]["edges"] %} + ip address {{ ip.node["address"]["value"] }} + no switchport +{% if intf.node.role.value == "peer" or intf.node.role.value == "backbone" %} + ip ospf network point-to-point +{% endif %} +{% endfor %} +{% endif %} +! +{% endif %} +{% endfor %} +! +interface {{ ns.management_intf_name }} +{% for intf in data.InfraDevice.edges[0]["interfaces"] %} +{% if intf.node.name.value == ns.management_intf_name %} +{% for ip in intf["ip_addresses"] %} + ip address {{ ip["address"]["value"] }} +{% endfor %} +{% endif %} +{% endfor %} +! +interface {{ ns.loopback_intf_name }} +{% for intf in data.InfraDevice.edges[0]["interfaces"] %} +{% if intf.node.name.value == ns.loopback_intf_name %} +{% for ip in intf["ip_addresses"] %} + ip address {{ ip["address"]["value"] }} +{% endfor %} +{% endif %} +{% endfor %} +! +ip prefix-list BOGON-Prefixes seq 10 permit 172.16.0.0/12 le 24 +ip prefix-list BOGON-Prefixes seq 20 permit 192.168.0.0/16 le 24 +ip prefix-list BOGON-Prefixes seq 10 permit 172.16.0.0/12 le 24 +ip prefix-list BOGON-Prefixes seq 20 permit 192.168.0.0/16 le 24 +! +ip routing +! +ip route 0.0.0.0/0 172.20.20.1 +! +{% if data.InfraDevice.edges[0].node.asn %} +router bgp {{ data.InfraDevice.edges[0].node.asn.node.asn.value }} + router-id {{ loopback_ip }} +{% for peer_group in data.InfraBGPPeerGroup.edges %} + neighbor {{ peer_group.node.name.value }} peer group +{% if peer_group.node.local_as %} + neighbor {{ peer_group.node.name.value }} local-as {{ peer_group.node.local_as.node.asn.value }} +{% endif %} +{% if peer_group.node.remote_as and peer_group.node.remote_as.node %} + neighbor {{ peer_group.node.name.value }} remote-as {{ peer_group.node.remote_as.node.asn.value }} +{% endif %} +{% endfor %} +! +{% endif %} +! +router ospf 1 + router-id {{ loopback_ip }} + redistribute connected + max-lsa 12000 + passive-interface Loopback0 + network 0.0.0.0/0 area 0.0.0.0 +! +route-map BOGONS permit 10 + match ip address prefix-list BOGON-Prefixes +! +route-map BOGONS deny 20 +! +end diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/templates/device_startup_info.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/templates/device_startup_info.gql new file mode 100644 index 0000000000..e750e482e9 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/templates/device_startup_info.gql @@ -0,0 +1,75 @@ +query device_startup_info ($device: String!) { + InfraDevice(name__value: $device) { + edges { + node { + id + name { + value + } + asn { + node { + asn { + value + } + } + } + interfaces { + edges { + node { + id + name { + value + } + description { + value + } + enabled { + value + } + mtu { + value + } + role { + value + } + ... 
on InfraInterfaceL3 { + ip_addresses { + edges { + node { + address { + value + } + } + } + } + } + } + } + } + } + } + } + InfraBGPPeerGroup { + edges { + node { + name { + value + } + local_as { + node { + asn { + value + } + } + } + remote_as { + node { + asn { + value + } + } + } + } + } + } +} diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/baseline/input.json b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/baseline/input.json new file mode 100644 index 0000000000..baaebaa82d --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/baseline/input.json @@ -0,0 +1,746 @@ +{ + "data": { + "InfraDevice": { + "edges": [ + { + "node": { + "id": "17a49cb4-f436-4208-45b0-2fc3e8a69b5e", + "name": { + "value": "atl-spine1" + }, + "asn": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "interfaces": { + "edges": [ + { + "node": { + "id": "17a49cb5-40c7-ba98-45bb-2fc35123c445", + "name": { + "value": "Ethernet1" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "transit" + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "203.0.113.1/29" + } + } + } + ] + } + } + }, + { + "node": { + "id": "17a49cb6-58b8-95b8-45bd-2fc344c745a9", + "name": { + "value": "Ethernet10" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "spare" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-5fa5-1b58-45ba-2fc31665bddd", + "name": { + "value": "Ethernet11" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "server" + } + } + }, + { + "node": { + "id": "17a49cb6-66fb-8270-45bf-2fc35467cd2a", + "name": { + "value": "Ethernet12" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "server" + } + } + }, + { + "node": { + "id": "17a49cb5-f691-1900-45b3-2fc3f988eccf", + "name": { + "value": "Ethernet2" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "transit" + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "203.0.113.9/29" + } + } + } + ] + } + } + }, + { + "node": { + "id": "17a49cb6-287c-1eb0-45bf-2fc373e12f43", + "name": { + "value": "Ethernet3" + }, + "description": { + "value": "Connected to atl-leaf1 Ethernet3" + }, + "enabled": { + "value": true + }, + "role": { + "value": "backbone" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-2f82-98d8-45b0-2fc3e78b8c96", + "name": { + "value": "Ethernet4" + }, + "description": { + "value": "Connected to atl-leaf2 Ethernet3" + }, + "enabled": { + "value": true + }, + "role": { + "value": "backbone" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-3642-49e8-45bb-2fc36f860291", + "name": { + "value": "Ethernet5" + }, + "description": { + "value": "Connected to atl-leaf3 Ethernet3" + }, + "enabled": { + "value": true + }, + "role": { + "value": "backbone" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-3d1e-aab8-45b5-2fc320d4d273", + "name": { + "value": "Ethernet6" + }, + "description": { + "value": "Connected to atl-leaf4 Ethernet3" + }, + "enabled": { + "value": true + }, + "role": { + "value": "backbone" + }, + "ip_addresses": { + 
"edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-43b3-4488-45b3-2fc38bb1050c", + "name": { + "value": "Ethernet7" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "spare" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-4a74-4970-45bb-2fc38dbef7e2", + "name": { + "value": "Ethernet8" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "spare" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb6-51ab-16b0-45bb-2fc37efe2d05", + "name": { + "value": "Ethernet9" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "spare" + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "id": "17a49cb5-12b4-f5a8-45bb-2fc3bce2aace", + "name": { + "value": "Loopback0" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "loopback" + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + } + ] + } + } + }, + { + "node": { + "id": "17a49cb5-1e95-ea08-45b4-2fc33f8df0f3", + "name": { + "value": "Management0" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "role": { + "value": "management" + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "172.100.100.23/24" + } + } + } + ] + } + } + } + ] + } + } + } + ] + }, + "InfraBGPPeerGroup": { + "edges": [ + { + "node": { + "name": { + "value": "IX_DEFAULT" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": null + } + } + }, + { + "node": { + "name": { + "value": "POP_GLOBAL" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": null + } + } + }, + { + "node": { + "name": { + "value": "POP_INTERNAL" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + } + } + }, + { + "node": { + "name": { + "value": "TRANSIT_DEFAULT" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": null + } + } + }, + { + "node": { + "name": { + "value": "TRANSIT_TELIA" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 1299 + } + } + } + } + } + ] + }, + "InfraTopology": { + "edges": [ + { + "node": { + "dns": { + "value": "8.8.8.8" + }, + "ntp": { + "value": "pool.ntp.org" + } + } + }, + { + "node": { + "dns": { + "value": "8.8.8.8" + }, + "ntp": { + "value": "pool.ntp.org" + } + } + } + ] + }, + "InfraBGPSession": { + "edges": [ + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + 
"address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf3" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf3" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf4" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf4" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.3/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.4/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.5/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.6/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.3/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.4/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.5/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.6/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + } + ] + } + } + } \ No newline at end of file diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/missing_interfaces/input.json b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/missing_interfaces/input.json new file mode 100644 index 0000000000..8a438a60d6 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/missing_interfaces/input.json @@ -0,0 +1,439 @@ +{ + "data": { + "InfraDevice": { + "edges": [ + { + "node": { + "id": "17a49cb4-f436-4208-45b0-2fc3e8a69b5e", + "name": { + "value": "atl-spine1" + }, + "asn": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "interfaces": { + "edges": [] + } + } + } + ] + }, + "InfraBGPPeerGroup": { + "edges": [ + { + "node": { + "name": { + "value": "IX_DEFAULT" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": null + } + } + }, + { + "node": { + "name": { + "value": "POP_GLOBAL" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": null + } + } + }, + { + "node": { + "name": { + "value": "POP_INTERNAL" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + } + } + }, + { + "node": { + "name": { + "value": "TRANSIT_DEFAULT" 
+ }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": null + } + } + }, + { + "node": { + "name": { + "value": "TRANSIT_TELIA" + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 1299 + } + } + } + } + } + ] + }, + "InfraTopology": { + "edges": [ + { + "node": { + "dns": { + "value": "8.8.8.8" + }, + "ntp": { + "value": "pool.ntp.org" + } + } + }, + { + "node": { + "dns": { + "value": "8.8.8.8" + }, + "ntp": { + "value": "pool.ntp.org" + } + } + } + ] + }, + "InfraBGPSession": { + "edges": [ + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf3" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf3" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf4" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-leaf4" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.3/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.4/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.5/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.6/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine1" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.3/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.4/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.5/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + }, + { + "node": { + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.6/32" + } + } + }, + "device": { + "node": { + "name": { + "value": "atl-spine2" + } + } + } + } + } + ] + } + } + } \ No newline at end of file diff 
--git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/missing_interfaces/output.txt b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/missing_interfaces/output.txt new file mode 100644 index 0000000000..2b86b2c066 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/device_startup/missing_interfaces/output.txt @@ -0,0 +1,121 @@ +! +no aaa root +! +username admin privilege 15 role network-admin secret sha512 $6$q4ez.aZgB/G/eeWW$ukvRobb5RtYmUlCcY0atxhwPmA6FPoRjR3AxYFJqNFoCRgJjrohKGrBsbY12n1uRZeCer1L8oejx5aPlrf.op0 +! +transceiver qsfp default-mode 4x10G +! +service routing protocols model multi-agent +! +hostname atl-spine1 +ip name-server 8.8.8.8 +! +ntp server pool.ntp.org +! +spanning-tree mode mstp +! +management api http-commands + no shutdown +! +management api gnmi + transport grpc default +! +management api netconf + transport ssh default +! +interface Ethernet1 + description role: transit + ip address 203.0.113.1/29 + no switchport +! +interface Ethernet10 + description role: spare +! +interface Ethernet11 + description role: server +! +interface Ethernet12 + description role: server +! +interface Ethernet2 + description role: transit + ip address 203.0.113.9/29 + no switchport +! +interface Ethernet3 + description Connected to atl-leaf1 Ethernet3 + ip ospf area 1 + ip ospf network point-to-point +! +interface Ethernet4 + description Connected to atl-leaf2 Ethernet3 + ip ospf area 1 + ip ospf network point-to-point +! +interface Ethernet5 + description Connected to atl-leaf3 Ethernet3 + ip ospf area 1 + ip ospf network point-to-point +! +interface Ethernet6 + description Connected to atl-leaf4 Ethernet3 + ip ospf area 1 + ip ospf network point-to-point +! +interface Ethernet7 + description role: spare +! +interface Ethernet8 + description role: spare +! +interface Ethernet9 + description role: spare +! +! +interface Management0 + description role: management + ip address 172.100.100.23/24 + no shutdown +! +interface Loopback0 + description role: loopback + ip address 10.0.0.7/32 + no shutdown +! +ip prefix-list BOGON-Prefixes seq 10 permit 172.16.0.0/12 le 24 +ip prefix-list BOGON-Prefixes seq 20 permit 192.168.0.0/16 le 24 +ip prefix-list BOGON-Prefixes seq 10 permit 172.16.0.0/12 le 24 +ip prefix-list BOGON-Prefixes seq 20 permit 192.168.0.0/16 le 24 +! +ip routing +! +ip route 0.0.0.0/0 172.20.20.1 +! +router bgp 64496 + router-id 10.0.0.7 + neighbor IX_DEFAULT peer group + neighbor POP_GLOBAL peer group + neighbor POP_INTERNAL peer group + neighbor POP_INTERNAL remote-as 64496 + neighbor TRANSIT_DEFAULT peer group + neighbor TRANSIT_TELIA peer group + neighbor TRANSIT_TELIA remote-as 1299 + neighbor 10.0.0.3 peer group POP_INTERNAL + neighbor 10.0.0.4 peer group POP_INTERNAL + neighbor 10.0.0.5 peer group POP_INTERNAL + neighbor 10.0.0.6 peer group POP_INTERNAL +! +! +router ospf 1 + router-id 10.0.0.7 + redistribute connected + max-lsa 12000 + passive-interface Loopback0 + network 0.0.0.0/0 area 0.0.0.0 +! +route-map BOGONS permit 10 + match ip address prefix-list BOGON-Prefixes +! +route-map BOGONS deny 20 +! 
+end diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_bgp/input.json b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_bgp/input.json new file mode 100644 index 0000000000..a2d0be0048 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_bgp/input.json @@ -0,0 +1,481 @@ +{ + "data": { + "InfraBGPSession": { + "edges": [ + { + "node": { + "id": "d0d47169-b1f3-4255-ab86-c06b39ea8d84", + "peer_group": { + "node": { + "name": { + "value": "TRANSIT_TELIA" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "203.0.113.9/29" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "203.0.113.10/29" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 1299 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "d2e49bff-ddf2-484b-a4f5-f1ac076a2a2e", + "peer_group": { + "node": { + "name": { + "value": "TRANSIT_DEFAULT" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "203.0.113.49/29" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "203.0.113.50/29" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 8220 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "066cb468-3229-4595-a4a4-a88b3bef082c", + "peer_group": { + "node": { + "name": { + "value": "POP_INTERNAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.7/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "4ece88b2-665c-4929-ac3e-96430f91974a", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.3/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "4b566879-b47d-452a-9b5a-2e2cee9190b7", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.8/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "f315994a-0caa-4677-b65d-d9dda58b06f3", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.4/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "1815f356-9f87-41c8-b817-42ecf6501628", + 
"peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.5/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "8c921dee-fe45-48c7-a940-ef10a2384a19", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.1/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "69a3dc11-5440-403c-8013-40d6d0ebad0a", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.6/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "3c82dabf-977b-4fce-b9a1-2f5f3be52a18", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.9/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + }, + { + "node": { + "id": "207ea49c-6af7-4a92-9a99-3b38d73d0487", + "peer_group": { + "node": { + "name": { + "value": "POP_GLOBAL" + } + } + }, + "local_ip": { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + }, + "remote_ip": { + "node": { + "address": { + "value": "10.0.0.10/32" + } + } + }, + "local_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "remote_as": { + "node": { + "asn": { + "value": 64496 + } + } + }, + "description": { + "value": null + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_bgp/output.json b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_bgp/output.json new file mode 100644 index 0000000000..53ddae5fbc --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_bgp/output.json @@ -0,0 +1,105 @@ +{ + "openconfig-bgp:neighbors": { + "neighbor": [ + { + "neighbor-address": "203.0.113.10", + "config": { + "neighbor-address": "203.0.113.10", + "peer-group": "TRANSIT_TELIA", + "peer-as": 1299, + "local-as": 64496 + } + }, + { + "neighbor-address": "203.0.113.50", + "config": { + "neighbor-address": "203.0.113.50", + "peer-group": "TRANSIT_DEFAULT", + "peer-as": 8220, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.7", + "config": { + "neighbor-address": "10.0.0.7", + "peer-group": "POP_INTERNAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.3", + "config": { + "neighbor-address": "10.0.0.3", + 
"peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.8", + "config": { + "neighbor-address": "10.0.0.8", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.4", + "config": { + "neighbor-address": "10.0.0.4", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.5", + "config": { + "neighbor-address": "10.0.0.5", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.1", + "config": { + "neighbor-address": "10.0.0.1", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.6", + "config": { + "neighbor-address": "10.0.0.6", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.9", + "config": { + "neighbor-address": "10.0.0.9", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + }, + { + "neighbor-address": "10.0.0.10", + "config": { + "neighbor-address": "10.0.0.10", + "peer-group": "POP_GLOBAL", + "peer-as": 64496, + "local-as": 64496 + } + } + ] + } +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_interfaces/input.json b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_interfaces/input.json new file mode 100644 index 0000000000..7b490b4f4b --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_interfaces/input.json @@ -0,0 +1,275 @@ +{ + "data": { + "InfraDevice": { + "edges": [ + { + "node": { + "id": "67bbcb4f-5ee7-4c9b-a598-58985ff3dbd4", + "interfaces": { + "edges": [ + { + "node": { + "name": { + "value": "Ethernet2" + }, + "description": { + "value": "Connected to ord1-edge2 Ethernet2" + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet3" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet12" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + } + } + }, + { + "node": { + "name": { + "value": "Ethernet4" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet6" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "203.0.113.49/29" + } + } + } + ] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet10" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet1" + }, + "description": { + "value": "Connected to ord1-edge2 Ethernet1" + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet11" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + } + } + }, + { + "node": { + "name": { + "value": "Ethernet9" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [ + { + 
"node": { + "address": { + "value": "203.0.113.81/29" + } + } + } + ] + } + } + }, + { + "node": { + "name": { + "value": "Loopback0" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "10.0.0.2/32" + } + } + } + ] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet5" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "203.0.113.9/29" + } + } + } + ] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet7" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + }, + { + "node": { + "name": { + "value": "Management0" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [ + { + "node": { + "address": { + "value": "172.20.20.18/24" + } + } + } + ] + } + } + }, + { + "node": { + "name": { + "value": "Ethernet8" + }, + "description": { + "value": null + }, + "enabled": { + "value": true + }, + "ip_addresses": { + "edges": [] + } + } + } + ] + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_interfaces/output.json b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_interfaces/output.json new file mode 100644 index 0000000000..9dc5318f1e --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/python_transforms/oc_interfaces/output.json @@ -0,0 +1,226 @@ +{ + "openconfig-interfaces:interface": [ + { + "name": "Ethernet2", + "config": { + "enabled": true, + "description": "Connected to ord1-edge2 Ethernet2" + }, + "subinterfaces": { + "subinterface": [] + } + }, + { + "name": "Ethernet3", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [] + } + }, + { + "name": "Ethernet12", + "config": { + "enabled": true + } + }, + { + "name": "Ethernet4", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [] + } + }, + { + "name": "Ethernet6", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [ + { + "index": 0, + "openconfig-if-ip:ipv4": { + "addresses": { + "address": [ + { + "ip": "203.0.113.49", + "config": { + "ip": "203.0.113.49", + "prefix-length": "29" + } + } + ] + }, + "config": { + "enabled": true + } + } + } + ] + } + }, + { + "name": "Ethernet10", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [] + } + }, + { + "name": "Ethernet1", + "config": { + "enabled": true, + "description": "Connected to ord1-edge2 Ethernet1" + }, + "subinterfaces": { + "subinterface": [] + } + }, + { + "name": "Ethernet11", + "config": { + "enabled": true + } + }, + { + "name": "Ethernet9", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [ + { + "index": 0, + "openconfig-if-ip:ipv4": { + "addresses": { + "address": [ + { + "ip": "203.0.113.81", + "config": { + "ip": "203.0.113.81", + "prefix-length": "29" + } + } + ] + }, + "config": { + "enabled": true + } + } + } + ] + } + }, + { + "name": "Loopback0", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [ + { + "index": 0, + "openconfig-if-ip:ipv4": { + "addresses": { + "address": [ + { + "ip": "10.0.0.2", + "config": { + "ip": "10.0.0.2", + "prefix-length": 
"32" + } + } + ] + }, + "config": { + "enabled": true + } + } + } + ] + } + }, + { + "name": "Ethernet5", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [ + { + "index": 0, + "openconfig-if-ip:ipv4": { + "addresses": { + "address": [ + { + "ip": "203.0.113.9", + "config": { + "ip": "203.0.113.9", + "prefix-length": "29" + } + } + ] + }, + "config": { + "enabled": true + } + } + } + ] + } + }, + { + "name": "Ethernet7", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [] + } + }, + { + "name": "Management0", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [ + { + "index": 0, + "openconfig-if-ip:ipv4": { + "addresses": { + "address": [ + { + "ip": "172.20.20.18", + "config": { + "ip": "172.20.20.18", + "prefix-length": "24" + } + } + ] + }, + "config": { + "enabled": true + } + } + } + ] + } + }, + { + "name": "Ethernet8", + "config": { + "enabled": true + }, + "subinterfaces": { + "subinterface": [] + } + } + ] +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_check.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_check.yml new file mode 100644 index 0000000000..d93481144e --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_check.yml @@ -0,0 +1,9 @@ +--- +version: "1.0" +infrahub_tests: + - resource: Check + resource_name: "backbone_link_redundancy" + tests: + - name: syntax_check + spec: + kind: check-smoke diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_graphql.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_graphql.yml new file mode 100644 index 0000000000..145ae01ee6 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_graphql.yml @@ -0,0 +1,34 @@ +--- +version: "1.0" +infrahub_tests: + # -------------------- GraphQLQuery -------------------- + - resource: GraphQLQuery + resource_name: check_backbone_link_redundancy + tests: + - name: syntax_check + spec: + path: checks/check_backbone_link_redundancy.gql + kind: graphql-query-smoke + + - resource: GraphQLQuery + resource_name: device_startup_info + tests: + - name: syntax_check + spec: + path: templates/device_startup_info.gql + kind: graphql-query-smoke + + - resource: GraphQLQuery + resource_name: oc_interfaces + tests: + - name: syntax_check + spec: + path: transforms/oc_interfaces.gql + kind: graphql-query-smoke + - resource: GraphQLQuery + resource_name: oc_bgp_neighbors + tests: + - name: syntax_check + spec: + path: transforms/oc_bgp_neighbors.gql + kind: graphql-query-smoke diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_transform_j2.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_transform_j2.yml new file mode 100644 index 0000000000..e5cee940a4 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_transform_j2.yml @@ -0,0 +1,17 @@ +--- +version: "1.0" +infrahub_tests: + - resource: Jinja2Transform + resource_name: "device_startup" + tests: + - name: "baseline" + expect: PASS + spec: + kind: "jinja2-transform-unit-render" + directory: device_startup/baseline + + - name: "missing_interfaces" + expect: FAIL + spec: + kind: "jinja2-transform-unit-render" + directory: 
device_startup/missing_interfaces diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_transform_python.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_transform_python.yml new file mode 100644 index 0000000000..4704901e21 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/tests/test_transform_python.yml @@ -0,0 +1,20 @@ +--- +version: "1.0" +infrahub_tests: + - resource: PythonTransform + resource_name: oc_bgp_neighbors + tests: + - name: oc_bgp + expect: PASS + spec: + kind: python-transform-unit-process + directory: python_transforms/oc_bgp + + - resource: PythonTransform + resource_name: OCInterfaces + tests: + - name: oc_interfaces + expect: PASS + spec: + kind: python-transform-unit-process + directory: python_transforms/oc_interfaces diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topo2.clabs.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topo2.clabs.yml new file mode 100644 index 0000000000..b561dcab84 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topo2.clabs.yml @@ -0,0 +1,15 @@ +--- +# topology documentation: http://containerlab.srlinux.dev/lab-examples/min-clos/ +name: test_crpd + +topology: + nodes: + crpd: + kind: juniper_crpd + image: crpd:22.2R1.9 + srl: + kind: srl + image: ghcr.io/nokia/srlinux + + links: + - endpoints: ["srl:e1-1", "crpd:eth1"] diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology.clabs.yml b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology.clabs.yml new file mode 100644 index 0000000000..0f4777b2f9 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology.clabs.yml @@ -0,0 +1,57 @@ +--- +# topology documentation: http://containerlab.srlinux.dev/lab-examples/min-clos/ +name: edge_triangle + +topology: + kinds: + srl: + image: ghcr.io/nokia/srlinux + linux: + image: ghcr.io/hellt/network-multitool + ceos: + kind: ceos + image: ceos:4.27.3F + nodes: + ord1-edge1: + kind: ceos + type: ixrd2 + mgmt_ipv4: 172.20.20.19 + startup-config: configs/startup/ord1-edge1.cfg + ord1-edge2: + kind: ceos + type: ixrd2 + mgmt_ipv4: 172.20.20.20 + startup-config: configs/startup/ord1-edge2.cfg + jfk1-edge1: + kind: ceos + type: ixrd2 + mgmt_ipv4: 172.20.20.21 + startup-config: configs/startup/jfk1-edge1.cfg + jfk1-edge2: + kind: ceos + type: ixrd2 + mgmt_ipv4: 172.20.20.22 + startup-config: configs/startup/jfk1-edge2.cfg + atl1-edge1: + kind: ceos + type: ixrd2 + mgmt_ipv4: 172.20.20.17 + startup-config: configs/startup/atl1-edge1.cfg + atl1-edge2: + kind: ceos + type: ixrd2 + mgmt_ipv4: 172.20.20.18 + startup-config: configs/startup/atl1-edge2.cfg + links: + - endpoints: ["atl1-edge1:eth1", "atl1-edge2:eth1"] + - endpoints: ["atl1-edge1:eth2", "atl1-edge2:eth2"] + - endpoints: ["ord1-edge1:eth1", "ord1-edge2:eth1"] + - endpoints: ["ord1-edge1:eth2", "ord1-edge2:eth2"] + - endpoints: ["jfk1-edge1:eth1", "jfk1-edge2:eth1"] + - endpoints: ["jfk1-edge1:eth2", "jfk1-edge2:eth2"] + - endpoints: ["atl1-edge1:eth4", "ord1-edge1:eth3"] + - endpoints: ["atl1-edge1:eth3", "jfk1-edge1:eth3"] + - endpoints: ["jfk1-edge1:eth4", "ord1-edge1:eth4"] + - endpoints: ["atl1-edge2:eth4", "ord1-edge2:eth3"] + - endpoints: ["atl1-edge2:eth3", "jfk1-edge2:eth3"] + - endpoints: 
["jfk1-edge2:eth4", "ord1-edge2:eth4"] diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology.tpl.j2 b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology.tpl.j2 new file mode 100644 index 0000000000..0ffd3603de --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology.tpl.j2 @@ -0,0 +1,38 @@ +# topology documentation: http://containerlab.srlinux.dev/lab-examples/min-clos/ +name: edge_triangle + +topology: + kinds: + srl: + image: ghcr.io/nokia/srlinux + linux: + image: ghcr.io/hellt/network-multitool + ceos: + kind: ceos + image: ceos:4.27.3F + nodes: +{% for device in data.device.edges %} + {{ device.node.name.value }}: + kind: ceos + type: ixrd2 +{% for intf in device.node.interfaces.edges %} +{% if intf.node.role.value == "management" %} +{% set management_ip = intf.node.ip_addresses.edges[0].node.address.value.split('/') %} + mgmt_ipv4: {{ management_ip[0] }} +{% endif %} +{% endfor %} + startup-config: configs/startup/{{ device.node.name.value }}.cfg +{% endfor %} + links: + - endpoints: ["atl1-edge1:eth1", "atl1-edge2:eth1"] + - endpoints: ["atl1-edge1:eth2", "atl1-edge2:eth2"] + - endpoints: ["ord1-edge1:eth1", "ord1-edge2:eth1"] + - endpoints: ["ord1-edge1:eth2", "ord1-edge2:eth2"] + - endpoints: ["jfk1-edge1:eth1", "jfk1-edge2:eth1"] + - endpoints: ["jfk1-edge1:eth2", "jfk1-edge2:eth2"] + - endpoints: ["atl1-edge1:eth4", "ord1-edge1:eth3"] + - endpoints: ["atl1-edge1:eth3", "jfk1-edge1:eth3"] + - endpoints: ["jfk1-edge1:eth4", "ord1-edge1:eth4"] + - endpoints: ["atl1-edge2:eth4", "ord1-edge2:eth3"] + - endpoints: ["atl1-edge2:eth3", "jfk1-edge2:eth3"] + - endpoints: ["jfk1-edge2:eth4", "ord1-edge2:eth4"] diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology_info.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology_info.gql new file mode 100644 index 0000000000..142747dc12 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/topology/topology_info.gql @@ -0,0 +1,32 @@ +query topology_info { + InfraDevice { + edges { + node { + name { + value + } + interfaces { + edges { + node { + id + role { + value + } + ... on InfraInterfaceL3 { + ip_addresses { + edges { + node { + address { + value + } + } + } + } + } + } + } + } + } + } + } +} diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/__init__.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/computed_circuit_description.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/computed_circuit_description.gql new file mode 100644 index 0000000000..a8eae70ce9 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/computed_circuit_description.gql @@ -0,0 +1,44 @@ +query CircuitDescriptionQuery($id: ID!) { + InfraCircuit(ids: [$id]) { + edges { + node { + circuit_id { + value + } + role { + value + } + + provider { + node { + name { + value + } + } + } + endpoints { + edges { + node { + connected_endpoint { + node { + ... 
on InfraInterfaceL3 { + name { + value + } + device { + node { + name { + value + } + } + } + } + } + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/computed_circuit_description.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/computed_circuit_description.py new file mode 100644 index 0000000000..af1a91cff6 --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/computed_circuit_description.py @@ -0,0 +1,23 @@ +from infrahub_sdk.transforms import InfrahubTransform + + +class ComputedCircuitDescription(InfrahubTransform): + query = "computed_circuit_description" + url = "computed_circuit_description" + + async def transform(self, data): + circuit_dict: dict = data["InfraCircuit"]["edges"][0]["node"] + + # If it's a backbone we compute a nice view + if circuit_dict["role"]["value"] == "backbone": + detailed_endpoints: list[str] = [] + + for endpoint in circuit_dict["endpoints"]["edges"]: + connected_endpoint: dict = endpoint["node"]["connected_endpoint"]["node"] + detailed_endpoints.append( + f"{connected_endpoint['device']['node']['name']['value']}::{connected_endpoint['name']['value']}" + ) + + return f" < {circuit_dict['circuit_id']['value']} > ".join(detailed_endpoints) + + return f"This {circuit_dict['role']['value']} circuit is provided by {circuit_dict['provider']['node']['name']['value']}" diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/oc_bgp_neighbors.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/oc_bgp_neighbors.gql new file mode 100644 index 0000000000..8ffd303eda --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/oc_bgp_neighbors.gql @@ -0,0 +1,47 @@ +query oc_bgp_neighbors ($device: String!) { + InfraBGPSession(device__name__value: $device) { + edges { + node { + id + peer_group { + node { + name { + value + } + } + } + local_ip { + node { + address { + value + } + } + } + remote_ip { + node { + address { + value + } + } + } + local_as { + node { + asn { + value + } + } + } + remote_as { + node { + asn { + value + } + } + } + description { + value + } + } + } + } +} diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/oc_interfaces.gql b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/oc_interfaces.gql new file mode 100644 index 0000000000..3f91bbf8af --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/oc_interfaces.gql @@ -0,0 +1,35 @@ +query oc_interfaces ($device: String!) { + InfraDevice(name__value: $device) { + edges { + node { + id + interfaces { + edges { + node { + name { + value + } + description { + value + } + enabled { + value + } + ... 
on InfraInterfaceL3 { + ip_addresses { + edges { + node { + address { + value + } + } + } + } + } + } + } + } + } + } + } +} diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/openconfig.py b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/openconfig.py new file mode 100644 index 0000000000..366eb0903c --- /dev/null +++ b/backend/tests/fixtures/repos/infrahub-demo-edge-integration/initial__main/transforms/openconfig.py @@ -0,0 +1,79 @@ +from infrahub_sdk.transforms import InfrahubTransform + + +class OCInterfaces(InfrahubTransform): + query = "oc_interfaces" + + async def transform(self, data): + response_payload = {} + response_payload["openconfig-interfaces:interface"] = [] + + for intf in data["InfraDevice"]["edges"][0]["node"]["interfaces"]["edges"]: + intf_name = intf["node"]["name"]["value"] + + intf_config = { + "name": intf_name, + "config": {"enabled": intf["node"]["enabled"]["value"]}, + } + + if intf["node"].get("description", None) and intf["node"]["description"]["value"]: + intf_config["config"]["description"] = intf["node"]["description"]["value"] + + if intf["node"].get("ip_addresses", None): + intf_config["subinterfaces"] = {"subinterface": []} + + for idx, ip in enumerate(intf["node"]["ip_addresses"]["edges"]): + address, mask = ip["node"]["address"]["value"].split("/") + intf_config["subinterfaces"]["subinterface"].append( + { + "index": idx, + "openconfig-if-ip:ipv4": { + "addresses": { + "address": [ + { + "ip": address, + "config": { + "ip": address, + "prefix-length": mask, + }, + } + ] + }, + "config": {"enabled": True}, + }, + } + ) + + response_payload["openconfig-interfaces:interface"].append(intf_config) + + return response_payload + + +class OCBGPNeighbors(InfrahubTransform): + query = "oc_bgp_neighbors" + url = "openconfig/network-instances/network-instance/protocols/protocol/bgp/neighbors" + + async def transform(self, data): + response_payload = {} + + response_payload["openconfig-bgp:neighbors"] = {"neighbor": []} + + for session in data["InfraBGPSession"]["edges"]: + neighbor_address = session["node"]["remote_ip"]["node"]["address"]["value"].split("/")[0] + session_data = { + "neighbor-address": neighbor_address, + "config": {"neighbor-address": neighbor_address}, + } + + if session["node"]["peer_group"]: + session_data["config"]["peer-group"] = session["node"]["peer_group"]["node"]["name"]["value"] + + if session["node"]["remote_as"]: + session_data["config"]["peer-as"] = session["node"]["remote_as"]["node"]["asn"]["value"] + + if session["node"]["local_as"]: + session_data["config"]["local-as"] = session["node"]["local_as"]["node"]["asn"]["value"] + + response_payload["openconfig-bgp:neighbors"]["neighbor"].append(session_data) + + return response_payload diff --git a/backend/tests/fixtures/repos/infrahub-demo-edge/initial__main/.infrahub.yml b/backend/tests/fixtures/repos/infrahub-demo-edge/initial__main/.infrahub.yml index 50bd441c9a..75d8c71abc 100644 --- a/backend/tests/fixtures/repos/infrahub-demo-edge/initial__main/.infrahub.yml +++ b/backend/tests/fixtures/repos/infrahub-demo-edge/initial__main/.infrahub.yml @@ -1,6 +1,4 @@ --- -schemas: - - schemas/demo_edge_fabric.yml jinja2_transforms: - name: "device_startup" diff --git a/backend/tests/functional/ipam/test_ipam_rebase_reconcile.py b/backend/tests/functional/ipam/test_ipam_rebase_reconcile.py index eac9b55d32..68e97e73b7 100644 --- a/backend/tests/functional/ipam/test_ipam_rebase_reconcile.py +++ 
b/backend/tests/functional/ipam/test_ipam_rebase_reconcile.py @@ -89,7 +89,7 @@ async def test_step02_add_delete_prefix( ) -> None: branch = await create_branch(db=db, branch_name="delete_prefix") - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=registry.default_branch) + gql_params = await prepare_graphql_params(db=db, branch=registry.default_branch) result = await graphql( schema=gql_params.schema, source=CREATE_IPPREFIX, @@ -145,8 +145,9 @@ async def test_step03_interlinked_prefixes_and_addresses( client: InfrahubClient, ) -> None: branch = await create_branch(db=db, branch_name="interlinked") + branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=registry.default_branch) + gql_params = await prepare_graphql_params(db=db, branch=registry.default_branch) result = await graphql( schema=gql_params.schema, source=CREATE_IPPREFIX, @@ -160,7 +161,7 @@ async def test_step03_interlinked_prefixes_and_addresses( assert result.data["IpamIPPrefixCreate"]["object"]["id"] net_10_0_0_0_7_id = result.data["IpamIPPrefixCreate"]["object"]["id"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=CREATE_IPPREFIX, @@ -177,7 +178,7 @@ async def test_step03_interlinked_prefixes_and_addresses( assert result.data["IpamIPPrefixCreate"]["object"]["id"] net_10_0_0_0_15_id = result.data["IpamIPPrefixCreate"]["object"]["id"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=registry.default_branch) + gql_params = await prepare_graphql_params(db=db, branch=registry.default_branch) result = await graphql( schema=gql_params.schema, source=CREATE_IPPREFIX, @@ -194,7 +195,7 @@ async def test_step03_interlinked_prefixes_and_addresses( assert result.data["IpamIPPrefixCreate"]["object"]["id"] net_10_10_8_0_22_id = result.data["IpamIPPrefixCreate"]["object"]["id"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=CREATE_IPADDRESS, diff --git a/backend/tests/functional/ipam/test_ipam_utilization.py b/backend/tests/functional/ipam/test_ipam_utilization.py index b8ce7363f8..bba9080e12 100644 --- a/backend/tests/functional/ipam/test_ipam_utilization.py +++ b/backend/tests/functional/ipam/test_ipam_utilization.py @@ -202,7 +202,7 @@ async def test_step01_graphql_prefix_pool_utilization( container = initial_dataset["container"] prefix_pool = initial_dataset["prefix_pool"] default_branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=POOL_UTILIZATION_QUERY, @@ -234,7 +234,8 @@ async def test_step01_graphql_prefix_pool_utilization( } } - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=PREFIX_UTILIZATION_QUERY, @@ -263,7 +264,8 @@ async def test_step01_graphql_address_pool_utilization( ): prefix = initial_dataset["prefix"] address_pool = 
initial_dataset["address_pool"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=POOL_UTILIZATION_QUERY, @@ -294,7 +296,7 @@ async def test_step01_graphql_address_pool_utilization( ], } } - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=PREFIX_UTILIZATION_QUERY, @@ -342,11 +344,12 @@ async def test_step02_branch_utilization( async def test_step02_graphql_prefix_pool_branch_utilization( self, db: InfrahubDatabase, default_branch: Branch, branch2: Branch, initial_dataset, step_02_dataset - ): + ) -> None: container = initial_dataset["container"] container_branch = step_02_dataset["container_branch"] prefix_pool = initial_dataset["prefix_pool"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=POOL_UTILIZATION_QUERY, @@ -385,7 +388,8 @@ async def test_step02_graphql_prefix_pool_branch_utilization( } } in prefix_details_list - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch2) + branch2.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch2) result = await graphql( schema=gql_params.schema, source=PREFIX_UTILIZATION_QUERY, @@ -414,11 +418,12 @@ async def test_step02_graphql_prefix_pool_branch_utilization( async def test_step02_graphql_address_pool_branch_utilization( self, db: InfrahubDatabase, default_branch: Branch, branch2: Branch, initial_dataset, step_02_dataset - ): + ) -> None: prefix = initial_dataset["prefix"] prefix_branch = step_02_dataset["prefix_branch"] address_pool = initial_dataset["address_pool"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=POOL_UTILIZATION_QUERY, @@ -457,7 +462,8 @@ async def test_step02_graphql_address_pool_branch_utilization( } } in prefix_details_list - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch2) + branch2.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch2) result = await graphql( schema=gql_params.schema, source=PREFIX_UTILIZATION_QUERY, @@ -520,11 +526,12 @@ async def test_step03_graphql_prefix_pool_delete_utilization( initial_dataset, step_02_dataset, step_03_dataset, - ): + ) -> None: container = initial_dataset["container"] container_branch = step_02_dataset["container_branch"] prefix_pool = initial_dataset["prefix_pool"] - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=POOL_UTILIZATION_QUERY, @@ -563,7 +570,8 @@ async def test_step03_graphql_prefix_pool_delete_utilization( } } in prefix_details_list - gql_params = await 
prepare_graphql_params(db=db, include_subscription=False, branch=branch2)
+        branch2.update_schema_hash()
+        gql_params = await prepare_graphql_params(db=db, branch=branch2)
         result = await graphql(
             schema=gql_params.schema,
             source=PREFIX_UTILIZATION_QUERY,
@@ -598,11 +606,12 @@ async def test_step03_graphql_address_pool_delete_utilization(
         initial_dataset,
         step_02_dataset,
         step_03_dataset,
-    ):
+    ) -> None:
         prefix = initial_dataset["prefix"]
         prefix_branch = step_02_dataset["prefix_branch"]
         address_pool = initial_dataset["address_pool"]
-        gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+        default_branch.update_schema_hash()
+        gql_params = await prepare_graphql_params(db=db, branch=default_branch)
         result = await graphql(
             schema=gql_params.schema,
             source=POOL_UTILIZATION_QUERY,
@@ -641,7 +650,8 @@
             }
         } in prefix_details_list
-        gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch2)
+        branch2.update_schema_hash()
+        gql_params = await prepare_graphql_params(db=db, branch=branch2)
         result = await graphql(
             schema=gql_params.schema,
             source=PREFIX_UTILIZATION_QUERY,
diff --git a/backend/tests/helpers/db_validation.py b/backend/tests/helpers/db_validation.py
index 8bad87b596..4705b43fa0 100644
--- a/backend/tests/helpers/db_validation.py
+++ b/backend/tests/helpers/db_validation.py
@@ -130,3 +130,38 @@ async def verify_no_duplicate_paths(db: InfrahubDatabase) -> None:
         raise ValueError(
             f"{num_paths} paths ({branch=},{edge_type=},{from_time=}) between nodes '{node_id1}' and '{node_id2}'"
         )
+
+
+async def validate_no_duplicate_attributes(db: InfrahubDatabase, branch: Branch) -> list[str]:
+    """
+    Validate that no Nodes have duplicated attribute names
+    """
+    branch_filter, branch_params = branch.get_query_filter_path()
+
+    query = """
+// -------------
+// get all the active Attributes on this branch and count them up
+// -------------
+MATCH (n:Node)-[:HAS_ATTRIBUTE]->(field:Attribute)
+WITH DISTINCT n, field
+CALL (n, field) {
+MATCH (n)-[r:HAS_ATTRIBUTE]->(field)
+WHERE %(branch_filter)s
+RETURN r
+ORDER BY r.branch_level DESC, r.from DESC, r.status ASC
+LIMIT 1
+}
+WITH n, field, r
+WHERE r.status = "active" AND r.to IS NULL
+WITH n.uuid AS node_id, field.name AS field_name, count(*) AS num_fields
+WHERE num_fields > 1
+RETURN node_id, field_name, num_fields
+    """ % {"branch_filter": branch_filter}
+    results = await db.execute_query(query=query, params=branch_params)
+    errors = []
+    for result in results:
+        node_id = result.get("node_id")
+        field_name = result.get("field_name")
+        num_fields = result.get("num_fields")
+        errors.append(f"Node '{node_id}' has {num_fields} duplicated attributes with {field_name=}")
+    return errors
diff --git a/backend/tests/helpers/graphql.py b/backend/tests/helpers/graphql.py
index 25b62d38c1..f3f3959efc 100644
--- a/backend/tests/helpers/graphql.py
+++ b/backend/tests/helpers/graphql.py
@@ -70,11 +70,10 @@ async def graphql_mutation(
     account_session: AccountSession | None = None,
 ) -> ExecutionResult:
     branch = branch or await Branch.get_by_name(name="main", db=db)
+    branch.update_schema_hash()
     variables = variables or {}
     gql_params = await prepare_graphql_params(
         db=db,
-        include_subscription=False,
-        include_mutation=True,
         branch=branch,
         service=service,
         account_session=account_session,
@@ -98,11 +97,10 @@ async def graphql_query(
 ) -> ExecutionResult:
     branch = branch or await Branch.get_by_name(name="main",
db=db) variables = variables or {} + branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, - include_mutation=False, branch=branch, service=service, account_session=account_session, diff --git a/backend/tests/helpers/test_app.py b/backend/tests/helpers/test_app.py index 3ab8ddbc2a..683638535f 100644 --- a/backend/tests/helpers/test_app.py +++ b/backend/tests/helpers/test_app.py @@ -25,6 +25,7 @@ from infrahub.core.schema.schema_branch import SchemaBranch from infrahub.core.utils import delete_all_nodes from infrahub.database import InfrahubDatabase +from infrahub.graphql.registry import registry as graphql_registry from infrahub.server import app, lifespan from infrahub.services import InfrahubServices from infrahub.services.adapters.workflow.local import WorkflowLocalExecution @@ -210,6 +211,7 @@ async def initialize_registry( await create_default_account_groups(db=db, admin_accounts=[admin_account], accounts=[unprivileged_account]) # This call emits a warning related to the fact database index manager has not been initialized. + graphql_registry.clear_cache() await initialization(db=db) async def assert_event(self, prefect_client: PrefectClient, event_name: str) -> None: diff --git a/backend/tests/integration/conftest.py b/backend/tests/integration/conftest.py index 57978efa04..85ff734c9e 100644 --- a/backend/tests/integration/conftest.py +++ b/backend/tests/integration/conftest.py @@ -1,7 +1,7 @@ import asyncio import os from pathlib import Path -from typing import Any +from typing import Any, Generator import pytest import yaml @@ -24,12 +24,12 @@ @pytest.fixture(scope="session", autouse=True) -def add_tracker(): +def add_tracker() -> None: os.environ["PYTEST_RUNNING"] = "true" @pytest.fixture(scope="session") -def event_loop(): +def event_loop() -> Generator: """Overrides pytest default function scoped event loop""" policy = asyncio.get_event_loop_policy() loop = policy.new_event_loop() @@ -37,7 +37,7 @@ def event_loop(): loop.close() -async def load_infrastructure_schema(db: InfrahubDatabase): +async def load_infrastructure_schema(db: InfrahubDatabase) -> None: base_dir = get_models_dir() / "base" default_branch_name = registry.default_branch @@ -57,7 +57,7 @@ async def load_infrastructure_schema(db: InfrahubDatabase): @pytest.fixture(scope="module") -async def init_db_infra(db: InfrahubDatabase): +async def init_db_infra(db: InfrahubDatabase) -> None: await delete_all_nodes(db=db) await first_time_initialization(db=db) await load_infrastructure_schema(db=db) @@ -65,7 +65,7 @@ async def init_db_infra(db: InfrahubDatabase): @pytest.fixture(scope="module") -async def init_db_base(db: InfrahubDatabase): +async def init_db_base(db: InfrahubDatabase) -> None: await delete_all_nodes(db=db) await first_time_initialization(db=db) await initialization(db=db) @@ -118,6 +118,13 @@ def git_repo_infrahub_demo_edge(git_sources_dir: Path) -> FileRepo: return FileRepo(name="infrahub-demo-edge", sources_directory=git_sources_dir) +@pytest.fixture(scope="session") +def git_repo_infrahub_demo_edge_integration(git_sources_dir: Path) -> FileRepo: + """Git Repository used as part of the demo-edge tutorial.""" + + return FileRepo(name="infrahub-demo-edge-integration", sources_directory=git_sources_dir) + + @pytest.fixture(scope="session") def git_repo_car_dealership(git_sources_dir: Path) -> FileRepo: """Simple Git Repository used for testing.""" @@ -126,12 +133,12 @@ def git_repo_car_dealership(git_sources_dir: Path) -> FileRepo: 
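# NOTE (illustrative sketch, not part of the upstream patch): the test-helper
# hunks above repeatedly replace
#     prepare_graphql_params(db=db, include_subscription=False, branch=...)
# with a schema-hash refresh followed by the simplified call. Assuming the
# generated GraphQL schema is cached per branch schema hash (the new
# graphql_registry.clear_cache() call in test_app.py points the same way),
# the calling pattern used throughout these tests looks like:
#
#     branch.update_schema_hash()  # refresh the hash after in-test schema changes
#     gql_params = await prepare_graphql_params(db=db, branch=branch)
#     result = await graphql(
#         schema=gql_params.schema,
#         source=QUERY,  # QUERY is a placeholder for any of the test documents
#         context_value=gql_params.context,
#         root_value=None,
#         variable_values={},
#     )
#
# Without the refresh, a schema cached under the branch's previous hash could
# be served to the test.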
@pytest.fixture(scope="session", autouse=True) -def prefect_test_fixture(): +def prefect_test_fixture() -> Generator: with prefect_test_harness(server_startup_timeout=60): yield @pytest.fixture(scope="session") -def prefect_test(prefect_test_fixture): +def prefect_test(prefect_test_fixture) -> Generator: with disable_run_logger(): yield diff --git a/backend/tests/integration/git/test_git_repository.py b/backend/tests/integration/git/test_git_repository.py index bed638fd74..5730e5ef74 100644 --- a/backend/tests/integration/git/test_git_repository.py +++ b/backend/tests/integration/git/test_git_repository.py @@ -1,5 +1,4 @@ from pathlib import Path -from typing import AsyncGenerator import pytest import yaml @@ -71,11 +70,10 @@ async def test_client( return InfrahubTestClient(app=app) @pytest.fixture - async def client(self, test_client: InfrahubTestClient, integration_helper) -> AsyncGenerator[InfrahubClient, None]: + async def client(self, test_client: InfrahubTestClient, integration_helper) -> InfrahubClient: admin_token = await integration_helper.create_token() config = Config(api_token=admin_token, requester=test_client.async_request) - sdk_client = InfrahubClient(config=config) - return sdk_client + return InfrahubClient(config=config) @pytest.fixture(scope="class") async def query_99(self, db: InfrahubDatabase, test_client): @@ -90,13 +88,18 @@ async def query_99(self, db: InfrahubDatabase, test_client): @pytest.fixture async def repo( - self, test_client, client, db: InfrahubDatabase, git_repo_infrahub_demo_edge: FileRepo, git_repos_dir + self, + test_client, + client, + db: InfrahubDatabase, + git_repo_infrahub_demo_edge_integration: FileRepo, + git_repos_dir, ): # Create the repository in the Graph obj = await Node.init(schema=InfrahubKind.REPOSITORY, db=db) await obj.new( db=db, - name=git_repo_infrahub_demo_edge.name, + name=git_repo_infrahub_demo_edge_integration.name, description="test repository", location="git@github.com:mock/test.git", ) @@ -104,7 +107,10 @@ async def repo( # Initialize the repository on the file system repo = await InfrahubRepository.new( - id=obj.id, name=git_repo_infrahub_demo_edge.name, location=git_repo_infrahub_demo_edge.path, client=client + id=obj.id, + name=git_repo_infrahub_demo_edge_integration.name, + location=git_repo_infrahub_demo_edge_integration.path, + client=client, ) return repo diff --git a/backend/tests/integration/profiles/test_profile_lifecycle.py b/backend/tests/integration/profiles/test_profile_lifecycle.py index 58dc7b50f6..b8c5c193bb 100644 --- a/backend/tests/integration/profiles/test_profile_lifecycle.py +++ b/backend/tests/integration/profiles/test_profile_lifecycle.py @@ -3,10 +3,12 @@ from infrahub.core import registry from infrahub.core.branch.models import Branch +from infrahub.core.constants import HashableModelState from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.core.schema import SchemaRoot from infrahub.core.schema.attribute_schema import AttributeSchema +from infrahub.core.schema.generic_schema import GenericSchema from infrahub.core.schema.node_schema import NodeSchema from infrahub.database import InfrahubDatabase from infrahub.graphql.initialization import prepare_graphql_params @@ -14,42 +16,292 @@ from tests.helpers.schema import load_schema from tests.helpers.test_app import TestInfrahubApp +PERSON_VALUES = """ + object { + id + profiles { edges { node { id } } } + name { + value + source { id } + is_from_profile + is_default + } + height { + value + source { id } + 
is_from_profile + is_default + } + weight { + value + source { id } + is_from_profile + is_default + } + eye_color { + value + source { id } + is_from_profile + is_default + } + description { + value + source { id } + is_from_profile + is_default + } + nothing { + value + source { id } + is_from_profile + is_default + } + size { + value + source { id } + is_from_profile + is_default + } + shape { + value + source { id } + is_from_profile + is_default + } + is_alive { + value + source { id } + is_from_profile + is_default + } + lifespan { + value + source { id } + is_from_profile + is_default + } + generic_nothing { + value + source { id } + is_from_profile + is_default + } + } +""" + +PERSON_UPDATE_QUERY = """ +mutation ($update_data: PretendPersonUpdateInput!) { + PretendPersonUpdate(data: $update_data) { + ok + %(person_values)s + } +} +""" % {"person_values": PERSON_VALUES} + class TestProfileLifecycle(TestInfrahubApp): @pytest.fixture(scope="class") - async def schema_person_base(self, db: InfrahubDatabase, initialize_registry) -> None: - person_schema = NodeSchema( + def generic_schema_base(self) -> GenericSchema: + return GenericSchema( + name="Lifeform", + namespace="Pretend", + attributes=[ + # size will become optional later to test that it is added to profiles + AttributeSchema(name="size", kind="TextArea", optional=False), + # shape will become mandatory later to test that it is removed from profiles + AttributeSchema(name="shape", kind="Text", optional=True), + # is_alive will become read_only=False later to test that it is added to profiles + AttributeSchema(name="is_alive", kind="Boolean", optional=True, read_only=True), + # lifespan will become read_only=True later to test that it is removed from profiles + AttributeSchema(name="lifespan", kind="Number", optional=True), + # generic_nothing will be removed later to test that it is removed from profiles + AttributeSchema(name="generic_nothing", kind="TextArea", optional=True), + ], + ) + + @pytest.fixture(scope="class") + def person_schema_base(self) -> NodeSchema: + return NodeSchema( name="Person", - namespace="Testing", - include_in_menu=True, + namespace="Pretend", + inherit_from=["PretendLifeform"], label="Person", attributes=[ AttributeSchema(name="name", kind="Text"), - AttributeSchema(name="description", kind="Text", optional=True), + # weight will become optional later to test that it is added to profiles + AttributeSchema(name="weight", kind="Number", optional=False), + # height will become mandatory later to test that it is removed from profiles AttributeSchema(name="height", kind="Number", optional=True), + # eye_color will become read_only=False later to test that it is added to profiles + AttributeSchema(name="eye_color", kind="Text", optional=True, read_only=True), + # description will become read_only=True later to test that it is removed from profiles + AttributeSchema(name="description", kind="TextArea", optional=True, default_value="placeholder"), + # nothing will be removed later to test that it is removed from profiles + AttributeSchema(name="nothing", kind="TextArea", optional=True), ], ) - await load_schema(db=db, schema=SchemaRoot(version="1.0", nodes=[person_schema])) @pytest.fixture(scope="class") - async def person_1(self, db: InfrahubDatabase, schema_person_base) -> Node: - schema = registry.schema.get_node_schema(name="TestingPerson", duplicate=False) + async def schema_root_01( + self, + db: InfrahubDatabase, + default_branch: Branch, + generic_schema_base: GenericSchema, + person_schema_base: 
NodeSchema, + client: InfrahubClient, + ) -> None: + schema_root = SchemaRoot(version="1.0", generics=[generic_schema_base], nodes=[person_schema_base]) + await load_schema(db=db, schema=schema_root, branch_name=default_branch.name, update_db=True) + + @pytest.fixture(scope="class") + async def generic_schema_add_attributes_to_profiles(self, generic_schema_base: GenericSchema) -> GenericSchema: + updated_generic_schema = generic_schema_base.model_copy(deep=True) + size_attribute = updated_generic_schema.get_attribute("size") + size_attribute.optional = True + is_alive_attribute = updated_generic_schema.get_attribute("is_alive") + is_alive_attribute.read_only = False + # new attribute that should be added to profiles + updated_generic_schema.attributes.append(AttributeSchema(name="value", kind="Number", optional=True)) + # new attribute that should NOT be added to profiles + updated_generic_schema.attributes.append( + AttributeSchema(name="generic_not_for_profiles", kind="Text", optional=True, read_only=True) + ) + return updated_generic_schema + + @pytest.fixture(scope="class") + async def person_schema_add_attributes_to_profiles(self, person_schema_base: NodeSchema) -> NodeSchema: + updated_person_schema = person_schema_base.model_copy(deep=True) + weight_attribute = updated_person_schema.get_attribute("weight") + weight_attribute.optional = True + eye_color_attribute = updated_person_schema.get_attribute("eye_color") + eye_color_attribute.read_only = False + # new attribute that should be added to profiles + updated_person_schema.attributes.append(AttributeSchema(name="age", kind="Number", optional=True)) + # new attribute that should NOT be added to profiles + updated_person_schema.attributes.append( + AttributeSchema(name="not_for_profiles", kind="Text", optional=True, read_only=True) + ) + return updated_person_schema + + @pytest.fixture(scope="class") + async def schema_root_02( + self, + db: InfrahubDatabase, + default_branch: Branch, + generic_schema_add_attributes_to_profiles: GenericSchema, + person_schema_add_attributes_to_profiles: NodeSchema, + client: InfrahubClient, + ) -> None: + schema_root = SchemaRoot( + version="1.0", + generics=[generic_schema_add_attributes_to_profiles], + nodes=[person_schema_add_attributes_to_profiles], + ) + response = await client.schema.load(schemas=[schema_root.model_dump()], branch=default_branch.name) + assert response.schema_updated + assert not response.errors + + @pytest.fixture(scope="class") + async def generic_schema_remove_attributes_from_profiles( + self, default_branch: Branch, generic_schema_add_attributes_to_profiles: GenericSchema, client: InfrahubClient + ) -> GenericSchema: + current_generic_schema = await client.schema.get( + kind="ProfilePretendLifeform", branch=default_branch.name, refresh=True + ) + current_generic_nothing_attribute = current_generic_schema.get_attribute("generic_nothing") + updated_generic_schema = generic_schema_add_attributes_to_profiles.model_copy(deep=True) + shape_attribute = updated_generic_schema.get_attribute("shape") + shape_attribute.optional = False + lifespan_attribute = updated_generic_schema.get_attribute("lifespan") + lifespan_attribute.read_only = True + generic_nothing_attribute = updated_generic_schema.get_attribute("generic_nothing") + generic_nothing_attribute.state = HashableModelState.ABSENT + generic_nothing_attribute.id = current_generic_nothing_attribute.id + return updated_generic_schema + + @pytest.fixture(scope="class") + async def person_schema_remove_attributes_from_profiles( + 
self, default_branch: Branch, person_schema_add_attributes_to_profiles: NodeSchema, client: InfrahubClient + ) -> NodeSchema: + current_person_schema = await client.schema.get( + kind="ProfilePretendPerson", branch=default_branch.name, refresh=True + ) + current_nothing_attribute = current_person_schema.get_attribute("nothing") + + updated_person_schema = person_schema_add_attributes_to_profiles.model_copy(deep=True) + height_attribute = updated_person_schema.get_attribute("height") + height_attribute.optional = False + description_attribute = updated_person_schema.get_attribute("description") + description_attribute.read_only = True + nothing_attribute = updated_person_schema.get_attribute("nothing") + nothing_attribute.state = HashableModelState.ABSENT + nothing_attribute.id = current_nothing_attribute.id + return updated_person_schema + + @pytest.fixture(scope="class") + async def schema_root_03( + self, + db: InfrahubDatabase, + default_branch: Branch, + generic_schema_remove_attributes_from_profiles: GenericSchema, + person_schema_remove_attributes_from_profiles: NodeSchema, + client: InfrahubClient, + ) -> None: + schema_root = SchemaRoot( + version="1.0", + generics=[generic_schema_remove_attributes_from_profiles], + nodes=[person_schema_remove_attributes_from_profiles], + ) + response = await client.schema.load(schemas=[schema_root.model_dump()], branch=default_branch.name) + assert response.schema_updated + assert not response.errors + + @pytest.fixture(scope="class") + async def person_1(self, db: InfrahubDatabase, schema_root_01) -> Node: + schema = registry.schema.get_node_schema(name="PretendPerson", duplicate=False) person_1 = await Node.init(db=db, schema=schema) - await person_1.new(db=db, name="Starbuck") + await person_1.new( + db=db, + name="Starbuck", + size="human", + weight=70, + ) await person_1.save(db=db) return person_1 @pytest.fixture(scope="class") - async def person_profile_1(self, db: InfrahubDatabase, schema_person_base) -> Node: - person_profile_1 = await Node.init(db=db, schema="ProfileTestingPerson") - await person_profile_1.new(db=db, profile_name="profile-one", profile_priority=10, height=167) + async def lifeform_profile_1(self, db: InfrahubDatabase, schema_root_01) -> Node: + lifeform_profile_1 = await Node.init(db=db, schema="ProfilePretendLifeform") + await lifeform_profile_1.new( + db=db, + profile_name="lifeform-profile-one", + profile_priority=3, + shape="lifeform-profile-one shape", + generic_nothing="lifeform-profile-one generic nothing", + ) + await lifeform_profile_1.save(db=db) + return lifeform_profile_1 + + @pytest.fixture(scope="class") + async def person_profile_1(self, db: InfrahubDatabase, schema_root_01) -> Node: + person_profile_1 = await Node.init(db=db, schema="ProfilePretendPerson") + await person_profile_1.new( + db=db, + profile_name="profile-one", + profile_priority=10, + height=167, + description="profile-one description", + lifespan=85, + nothing="profile-one nothing", + generic_nothing="profile-one generic nothing", + ) await person_profile_1.save(db=db) return person_profile_1 async def test_step_01_one_person_no_profile( - self, db: InfrahubDatabase, schema_person_base, person_1, person_profile_1, client: InfrahubClient + self, db: InfrahubDatabase, schema_root_01, person_1, person_profile_1, client: InfrahubClient ): - retrieved_person = await client.get(kind="TestingPerson", id=person_1.id, property=True) + retrieved_person = await client.get(kind="PretendPerson", id=person_1.id, property=True) assert 
retrieved_person.profiles.peer_ids == [] assert retrieved_person.name.value == "Starbuck" @@ -60,54 +312,38 @@ async def test_step_01_one_person_no_profile( assert retrieved_person.height.is_from_profile is False assert retrieved_person.height.source is None assert retrieved_person.height.is_default is True + assert retrieved_person.weight.value == 70 + assert retrieved_person.weight.is_from_profile is False + assert retrieved_person.weight.source is None + assert retrieved_person.weight.is_default is False + assert retrieved_person.size.value == "human" + assert retrieved_person.size.is_from_profile is False + assert retrieved_person.size.source is None + assert retrieved_person.size.is_default is False async def test_step_02_one_person_add_profile( self, db: InfrahubDatabase, - default_branch, + default_branch: Branch, person_1, person_profile_1, ): - mutation = """ - mutation { - TestingPersonUpdate(data: {id: "%(person_id)s", profiles: [{ id: "%(profile_id)s"}]}) { - ok - object { - id - profiles { edges { node { id } } } - name { - value - source { id } - is_from_profile - is_default - } - height { - value - source { id } - is_from_profile - is_default - } - } - } - } - """ % {"person_id": person_1.id, "profile_id": person_profile_1.id} - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, - source=mutation, + source=PERSON_UPDATE_QUERY, context_value=gql_params.context, root_value=None, - variable_values={}, + variable_values={"update_data": {"id": person_1.id, "profiles": [{"id": person_profile_1.id}]}}, ) assert result.errors is None assert result.data - assert result.data["TestingPersonUpdate"]["ok"] is True - profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert result.data["PretendPersonUpdate"]["ok"] is True + profiles = result.data["PretendPersonUpdate"]["object"]["profiles"]["edges"] assert len(profiles) == 1 assert profiles == [{"node": {"id": person_profile_1.id}}] - attributes = result.data["TestingPersonUpdate"]["object"] + attributes = result.data["PretendPersonUpdate"]["object"] assert attributes["id"] == person_1.id assert attributes["name"] == { "value": "Starbuck", @@ -121,6 +357,61 @@ async def test_step_02_one_person_add_profile( "source": {"id": person_profile_1.id}, "is_default": False, } + assert attributes["weight"] == { + "value": 70, + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["eye_color"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["description"] == { + "value": "profile-one description", + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + assert attributes["nothing"] == { + "value": "profile-one nothing", + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + assert attributes["size"] == { + "value": "human", + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["shape"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["is_alive"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["lifespan"] == { + "value": 85, + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + 
"is_default": False, + } + assert attributes["generic_nothing"] == { + "value": "profile-one generic nothing", + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + retrieved_person = await NodeManager.get_one(db=db, id=person_1.id, include_source=True) assert retrieved_person.name.value == "Starbuck" assert retrieved_person.name.is_from_profile is False @@ -130,38 +421,66 @@ async def test_step_02_one_person_add_profile( assert retrieved_person.height.is_from_profile is True assert retrieved_person.height.source_id == person_profile_1.id assert retrieved_person.height.is_default is False + assert retrieved_person.weight.value == 70 + assert retrieved_person.weight.is_from_profile is False + assert retrieved_person.weight.source_id is None + assert retrieved_person.weight.is_default is False + assert retrieved_person.eye_color.value is None + assert retrieved_person.eye_color.is_from_profile is False + assert retrieved_person.eye_color.source_id is None + assert retrieved_person.eye_color.is_default is True + assert retrieved_person.description.value == "profile-one description" + assert retrieved_person.description.is_from_profile is True + assert retrieved_person.description.source_id == person_profile_1.id + assert retrieved_person.description.is_default is False + assert retrieved_person.nothing.value == "profile-one nothing" + assert retrieved_person.nothing.is_from_profile is True + assert retrieved_person.nothing.source_id == person_profile_1.id + assert retrieved_person.nothing.is_default is False + assert retrieved_person.size.value == "human" + assert retrieved_person.size.is_from_profile is False + assert retrieved_person.size.source_id is None + assert retrieved_person.size.is_default is False + assert retrieved_person.shape.value is None + assert retrieved_person.shape.is_from_profile is False + assert retrieved_person.shape.source_id is None + assert retrieved_person.shape.is_default is True + assert retrieved_person.is_alive.value is None + assert retrieved_person.is_alive.is_from_profile is False + assert retrieved_person.is_alive.source_id is None + assert retrieved_person.is_alive.is_default is True + assert retrieved_person.lifespan.value == 85 + assert retrieved_person.lifespan.is_from_profile is True + assert retrieved_person.lifespan.source_id == person_profile_1.id + assert retrieved_person.lifespan.is_default is False + assert retrieved_person.generic_nothing.value == "profile-one generic nothing" + assert retrieved_person.generic_nothing.is_from_profile is True + assert retrieved_person.generic_nothing.source_id == person_profile_1.id + assert retrieved_person.generic_nothing.is_default is False async def test_step_03_create_person_with_profile( self, db: InfrahubDatabase, - default_branch, + default_branch: Branch, person_profile_1, - ): + ) -> None: mutation = """ mutation { - TestingPersonCreate(data: {name: {value: "Apollo"}, profiles: [{ id: "%(profile_id)s"}]}) { + PretendPersonCreate(data: { + name: {value: "Apollo"}, + weight: {value: 85}, + size: {value: "bigger human"} + shape: {value: "bipedal"} + profiles: [{ id: "%(profile_id)s"}] + }) { ok - object { - id - profiles { edges { node { id } } } - name { - value - source { id } - is_from_profile - is_default - } - height { - value - source { id } - is_from_profile - is_default - } - } + %(person_values)s } } - """ % {"profile_id": person_profile_1.id} + """ % {"profile_id": person_profile_1.id, "person_values": PERSON_VALUES} - gql_params = await 
prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=mutation, @@ -172,12 +491,12 @@ async def test_step_03_create_person_with_profile( assert result.errors is None assert result.data - assert result.data["TestingPersonCreate"]["ok"] is True - new_person_id = result.data["TestingPersonCreate"]["object"]["id"] - profiles = result.data["TestingPersonCreate"]["object"]["profiles"]["edges"] + assert result.data["PretendPersonCreate"]["ok"] is True + new_person_id = result.data["PretendPersonCreate"]["object"]["id"] + profiles = result.data["PretendPersonCreate"]["object"]["profiles"]["edges"] assert len(profiles) == 1 assert profiles == [{"node": {"id": person_profile_1.id}}] - attributes = result.data["TestingPersonCreate"]["object"] + attributes = result.data["PretendPersonCreate"]["object"] assert attributes["name"] == {"value": "Apollo", "is_from_profile": False, "source": None, "is_default": False} assert attributes["height"] == { "value": 167, @@ -185,6 +504,61 @@ async def test_step_03_create_person_with_profile( "source": {"id": person_profile_1.id}, "is_default": False, } + assert attributes["weight"] == { + "value": 85, + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["eye_color"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["description"] == { + "value": "profile-one description", + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + assert attributes["nothing"] == { + "value": "profile-one nothing", + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + assert attributes["size"] == { + "value": "bigger human", + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["shape"] == { + "value": "bipedal", + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["is_alive"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["lifespan"] == { + "value": 85, + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + assert attributes["generic_nothing"] == { + "value": "profile-one generic nothing", + "is_from_profile": True, + "source": {"id": person_profile_1.id}, + "is_default": False, + } + retrieved_person = await NodeManager.get_one(db=db, id=new_person_id, include_source=True) assert retrieved_person.name.value == "Apollo" assert retrieved_person.name.is_from_profile is False @@ -194,56 +568,68 @@ async def test_step_03_create_person_with_profile( assert retrieved_person.height.is_from_profile is True assert retrieved_person.height.source_id == person_profile_1.id assert retrieved_person.height.is_default is False + assert retrieved_person.weight.value == 85 + assert retrieved_person.weight.is_from_profile is False + assert retrieved_person.weight.source_id is None + assert retrieved_person.weight.is_default is False + assert retrieved_person.eye_color.value is None + assert retrieved_person.eye_color.is_from_profile is False + assert retrieved_person.eye_color.source_id is None + assert retrieved_person.eye_color.is_default is True + assert retrieved_person.description.value == "profile-one description" + assert 
retrieved_person.description.is_from_profile is True + assert retrieved_person.description.source_id == person_profile_1.id + assert retrieved_person.description.is_default is False + assert retrieved_person.nothing.value == "profile-one nothing" + assert retrieved_person.nothing.is_from_profile is True + assert retrieved_person.nothing.source_id == person_profile_1.id + assert retrieved_person.nothing.is_default is False + assert retrieved_person.size.value == "bigger human" + assert retrieved_person.size.is_from_profile is False + assert retrieved_person.size.source_id is None + assert retrieved_person.size.is_default is False + assert retrieved_person.shape.value == "bipedal" + assert retrieved_person.shape.is_from_profile is False + assert retrieved_person.shape.source_id is None + assert retrieved_person.shape.is_default is False + assert retrieved_person.is_alive.value is None + assert retrieved_person.is_alive.is_from_profile is False + assert retrieved_person.is_alive.source_id is None + assert retrieved_person.is_alive.is_default is True + assert retrieved_person.lifespan.value == 85 + assert retrieved_person.lifespan.is_from_profile is True + assert retrieved_person.lifespan.source_id == person_profile_1.id + assert retrieved_person.lifespan.is_default is False + assert retrieved_person.generic_nothing.value == "profile-one generic nothing" + assert retrieved_person.generic_nothing.is_from_profile is True + assert retrieved_person.generic_nothing.source_id == person_profile_1.id + assert retrieved_person.generic_nothing.is_default is False async def test_step_04_update_non_profile_attribute( self, db: InfrahubDatabase, - default_branch, + default_branch: Branch, person_1, person_profile_1, ): - mutation = """ - mutation { - TestingPersonUpdate(data: {id: "%(person_id)s", name: {value: "Kara Thrace"}}) { - ok - object { - id - profiles { edges { node { id } } } - name { - value - source { id } - is_from_profile - is_default - } - height { - value - source { id } - is_from_profile - is_default - } - } - } - } - """ % { - "person_id": person_1.id, - } - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, - source=mutation, + source=PERSON_UPDATE_QUERY, context_value=gql_params.context, root_value=None, - variable_values={}, + variable_values={ + "update_data": {"id": person_1.id, "name": {"value": "Kara Thrace"}, "shape": {"value": "bipedal"}} + }, ) assert result.errors is None assert result.data - assert result.data["TestingPersonUpdate"]["ok"] is True - profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert result.data["PretendPersonUpdate"]["ok"] is True + profiles = result.data["PretendPersonUpdate"]["object"]["profiles"]["edges"] assert len(profiles) == 1 assert profiles == [{"node": {"id": person_profile_1.id}}] - attributes = result.data["TestingPersonUpdate"]["object"] + attributes = result.data["PretendPersonUpdate"]["object"] assert attributes["id"] == person_1.id assert attributes["name"] == { "value": "Kara Thrace", @@ -251,6 +637,12 @@ async def test_step_04_update_non_profile_attribute( "source": None, "is_default": False, } + assert attributes["shape"] == { + "value": "bipedal", + "is_from_profile": False, + "source": None, + "is_default": False, + } assert attributes["height"] == { "value": 167, "is_from_profile": True, @@ -262,6 +654,10 @@ async def 
test_step_04_update_non_profile_attribute( assert retrieved_person.name.is_from_profile is False assert retrieved_person.name.source_id is None assert retrieved_person.name.is_default is False + assert retrieved_person.shape.value == "bipedal" + assert retrieved_person.shape.is_from_profile is False + assert retrieved_person.shape.source_id is None + assert retrieved_person.shape.is_default is False assert retrieved_person.height.value == 167 assert retrieved_person.height.is_from_profile is True assert retrieved_person.height.source_id == person_profile_1.id @@ -275,17 +671,19 @@ async def test_step_05_add_profile_with_person( client: InfrahubClient, ): profile = await client.create( - kind="ProfileTestingPerson", + kind="ProfilePretendPerson", profile_name="profile-two", profile_priority=5, height=156, + shape="regular", + size="average", related_nodes=[person_1.id], ) await profile.save() async def test_step_06_get_person_multiple_profiles(self, person_1, person_profile_1, client: InfrahubClient): - person_profile_2 = await client.get(kind="ProfileTestingPerson", profile_name__value="profile-two") - retrieved_person = await client.get(kind="TestingPerson", id=person_1.id, property=True) + person_profile_2 = await client.get(kind="ProfilePretendPerson", profile_name__value="profile-two") + retrieved_person = await client.get(kind="PretendPerson", id=person_1.id, property=True) await retrieved_person.profiles.fetch() assert set(retrieved_person.profiles.peer_ids) == {person_profile_1.id, person_profile_2.id} @@ -297,56 +695,92 @@ async def test_step_06_get_person_multiple_profiles(self, person_1, person_profi assert retrieved_person.height.is_from_profile is True assert retrieved_person.height.source.id == person_profile_2.id assert retrieved_person.height.is_default is False + assert retrieved_person.weight.value == 70 + assert retrieved_person.weight.is_from_profile is False + assert retrieved_person.weight.source is None + assert retrieved_person.weight.is_default is False + assert retrieved_person.eye_color.value is None + assert retrieved_person.eye_color.is_from_profile is False + assert retrieved_person.eye_color.source is None + assert retrieved_person.eye_color.is_default is True + assert retrieved_person.description.value == "profile-one description" + assert retrieved_person.description.is_from_profile is True + assert retrieved_person.description.source.id == person_profile_1.id + assert retrieved_person.description.is_default is False + assert retrieved_person.nothing.value == "profile-one nothing" + assert retrieved_person.nothing.is_from_profile is True + assert retrieved_person.nothing.source.id == person_profile_1.id + assert retrieved_person.nothing.is_default is False + assert retrieved_person.shape.value == "bipedal" + assert retrieved_person.shape.is_from_profile is False + assert retrieved_person.shape.source is None + assert retrieved_person.shape.is_default is False + assert retrieved_person.size.value == "human" + assert retrieved_person.size.is_from_profile is False + assert retrieved_person.size.source is None + assert retrieved_person.size.is_default is False async def test_step_07_update_person_delete_profile( self, db: InfrahubDatabase, - default_branch, + default_branch: Branch, client, ): - person_2 = await client.get(kind="TestingPerson", name__value="Apollo", property=True) - mutation = """ - mutation { - TestingPersonUpdate(data: {id: "%(person_id)s", profiles: []}) { - ok - object { - id - profiles { edges { node { id } } } - name { - value - source { 
id } - is_from_profile - is_default - } - height { - value - source { id } - is_from_profile - is_default - } - } - } - } - """ % {"person_id": person_2.id} - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, - source=mutation, + source=PERSON_UPDATE_QUERY, context_value=gql_params.context, root_value=None, - variable_values={}, + variable_values={"update_data": {"id": person_2.id, "profiles": []}}, ) assert result.errors is None assert result.data - assert result.data["TestingPersonUpdate"]["ok"] is True - profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert result.data["PretendPersonUpdate"]["ok"] is True + profiles = result.data["PretendPersonUpdate"]["object"]["profiles"]["edges"] assert profiles == [] - attributes = result.data["TestingPersonUpdate"]["object"] + attributes = result.data["PretendPersonUpdate"]["object"] assert attributes["id"] == person_2.id assert attributes["name"] == {"value": "Apollo", "is_from_profile": False, "source": None, "is_default": False} assert attributes["height"] == {"value": None, "is_from_profile": False, "source": None, "is_default": True} + assert attributes["description"] == { + "value": "placeholder", + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["nothing"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["shape"] == { + "value": "bipedal", + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["size"] == { + "value": "bigger human", + "is_from_profile": False, + "source": None, + "is_default": False, + } + assert attributes["lifespan"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } + assert attributes["generic_nothing"] == { + "value": None, + "is_from_profile": False, + "source": None, + "is_default": True, + } retrieved_person = await NodeManager.get_one(db=db, id=person_2.id, include_source=True) assert retrieved_person.name.value == "Apollo" @@ -357,6 +791,30 @@ async def test_step_07_update_person_delete_profile( assert retrieved_person.height.is_from_profile is False assert retrieved_person.height.source_id is None assert retrieved_person.height.is_default is True + assert retrieved_person.shape.value == "bipedal" + assert retrieved_person.shape.is_from_profile is False + assert retrieved_person.shape.source_id is None + assert retrieved_person.shape.is_default is False + assert retrieved_person.size.value == "bigger human" + assert retrieved_person.size.is_from_profile is False + assert retrieved_person.size.source_id is None + assert retrieved_person.size.is_default is False + assert retrieved_person.description.value == "placeholder" + assert retrieved_person.description.is_from_profile is False + assert retrieved_person.description.source_id is None + assert retrieved_person.description.is_default is True + assert retrieved_person.nothing.value is None + assert retrieved_person.nothing.is_from_profile is False + assert retrieved_person.nothing.source_id is None + assert retrieved_person.nothing.is_default is True + assert retrieved_person.lifespan.value is None + assert retrieved_person.lifespan.is_from_profile is False + assert 
retrieved_person.lifespan.source_id is None + assert retrieved_person.lifespan.is_default is True + assert retrieved_person.generic_nothing.value is None + assert retrieved_person.generic_nothing.is_from_profile is False + assert retrieved_person.generic_nothing.source_id is None + assert retrieved_person.generic_nothing.is_default is True async def test_step_08_delete_profile( self, @@ -364,15 +822,15 @@ async def test_step_08_delete_profile( default_branch, client: InfrahubClient, ): - person_profile_2 = await client.get(kind="ProfileTestingPerson", profile_name__value="profile-two") + person_profile_2 = await client.get(kind="ProfilePretendPerson", profile_name__value="profile-two") await person_profile_2.delete() async def test_step_09_check_persons( self, db: InfrahubDatabase, person_1, person_profile_1, client: InfrahubClient, default_branch: Branch ): - retrieved_person_1 = await client.get(kind="TestingPerson", id=person_1.id, property=True) + retrieved_person_1 = await client.get(kind="PretendPerson", id=person_1.id, property=True) await retrieved_person_1.profiles.fetch() - retrieved_person_2 = await client.get(kind="TestingPerson", name__value="Apollo", property=True) + retrieved_person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) assert retrieved_person_1.profiles.peer_ids == [person_profile_1.id] assert retrieved_person_1.name.value == "Kara Thrace" @@ -383,6 +841,31 @@ async def test_step_09_check_persons( assert retrieved_person_1.height.is_from_profile is True assert retrieved_person_1.height.source.id == person_profile_1.id assert retrieved_person_1.height.is_default is False + assert retrieved_person_1.weight.value == 70 + assert retrieved_person_1.weight.is_from_profile is False + assert retrieved_person_1.weight.source is None + assert retrieved_person_1.weight.is_default is False + assert retrieved_person_1.eye_color.value is None + assert retrieved_person_1.eye_color.is_from_profile is False + assert retrieved_person_1.eye_color.source is None + assert retrieved_person_1.eye_color.is_default is True + assert retrieved_person_1.description.value == "profile-one description" + assert retrieved_person_1.description.is_from_profile is True + assert retrieved_person_1.description.source.id == person_profile_1.id + assert retrieved_person_1.description.is_default is False + assert retrieved_person_1.nothing.value == "profile-one nothing" + assert retrieved_person_1.nothing.is_from_profile is True + assert retrieved_person_1.nothing.source.id == person_profile_1.id + assert retrieved_person_1.nothing.is_default is False + assert retrieved_person_1.size.value == "human" + assert retrieved_person_1.size.is_from_profile is False + assert retrieved_person_1.size.source is None + assert retrieved_person_1.size.is_default is False + assert retrieved_person_1.shape.value == "bipedal" + assert retrieved_person_1.shape.is_from_profile is False + assert retrieved_person_1.shape.source is None + assert retrieved_person_1.shape.is_default is False + assert retrieved_person_2.profiles.peer_ids == [] assert retrieved_person_2.name.value == "Apollo" assert retrieved_person_2.name.is_from_profile is False @@ -392,53 +875,59 @@ async def test_step_09_check_persons( assert retrieved_person_2.height.is_from_profile is False assert retrieved_person_2.height.source is None assert retrieved_person_2.height.is_default is True + assert retrieved_person_2.weight.value == 85 + assert retrieved_person_2.weight.is_from_profile is False + assert 
retrieved_person_2.weight.source is None + assert retrieved_person_2.weight.is_default is False + assert retrieved_person_2.eye_color.value is None + assert retrieved_person_2.eye_color.is_from_profile is False + assert retrieved_person_2.eye_color.source is None + assert retrieved_person_2.eye_color.is_default is True + assert retrieved_person_2.description.value == "placeholder" + assert retrieved_person_2.description.is_from_profile is False + assert retrieved_person_2.description.source is None + assert retrieved_person_2.description.is_default is True + assert retrieved_person_2.nothing.value is None + assert retrieved_person_2.nothing.is_from_profile is False + assert retrieved_person_2.nothing.source is None + assert retrieved_person_2.nothing.is_default is True + assert retrieved_person_2.size.value == "bigger human" + assert retrieved_person_2.size.is_from_profile is False + assert retrieved_person_2.size.source is None + assert retrieved_person_2.size.is_default is False + assert retrieved_person_2.shape.value == "bipedal" + assert retrieved_person_2.shape.is_from_profile is False + assert retrieved_person_2.shape.source is None + assert retrieved_person_2.shape.is_default is False async def test_step_10_update_person_override_profile( self, db: InfrahubDatabase, - default_branch, + default_branch: Branch, person_1, person_profile_1, ): - mutation = """ - mutation { - TestingPersonUpdate(data: {id: "%(person_id)s", height: {value: 145}}) { - ok - object { - id - profiles { edges { node { id } } } - name { - value - source { id } - is_from_profile - is_default - } - height { - value - source { id } - is_from_profile - is_default - } - } - } - } - """ % {"person_id": person_1.id} - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, - source=mutation, + source=PERSON_UPDATE_QUERY, context_value=gql_params.context, root_value=None, - variable_values={}, + variable_values={ + "update_data": { + "id": person_1.id, + "height": {"value": 145}, + "shape": {"value": "symmetrical bilateral"}, + } + }, ) assert result.errors is None assert result.data - assert result.data["TestingPersonUpdate"]["ok"] is True - profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert result.data["PretendPersonUpdate"]["ok"] is True + profiles = result.data["PretendPersonUpdate"]["object"]["profiles"]["edges"] assert profiles == [{"node": {"id": person_profile_1.id}}] - attributes = result.data["TestingPersonUpdate"]["object"] + attributes = result.data["PretendPersonUpdate"]["object"] assert attributes["id"] == person_1.id assert attributes["name"] == { "value": "Kara Thrace", @@ -447,6 +936,13 @@ async def test_step_10_update_person_override_profile( "is_default": False, } assert attributes["height"] == {"value": 145, "is_from_profile": False, "source": None, "is_default": False} + assert attributes["shape"] == { + "value": "symmetrical bilateral", + "is_from_profile": False, + "source": None, + "is_default": False, + } + retrieved_person = await NodeManager.get_one(db=db, id=person_1.id) assert retrieved_person.name.value == "Kara Thrace" assert retrieved_person.name.is_from_profile is False @@ -456,6 +952,10 @@ async def test_step_10_update_person_override_profile( assert retrieved_person.height.is_from_profile is False assert retrieved_person.height.source_id is None assert 
retrieved_person.height.is_default is False + assert retrieved_person.shape.value == "symmetrical bilateral" + assert retrieved_person.shape.is_from_profile is False + assert retrieved_person.shape.source_id is None + assert retrieved_person.shape.is_default is False async def test_step_11_update_existing_profile( self, @@ -465,15 +965,19 @@ async def test_step_11_update_existing_profile( person_1, client: InfrahubClient, ): - person_profile_1 = await client.get(kind="ProfileTestingPerson", id=person_profile_1.id) + person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) person_profile_1.profile_priority.value = 11 person_profile_1.height.value = 134 + person_profile_1.nothing.value = "profile-one nothing updated" + person_profile_1.generic_nothing.value = "profile-one generic nothing updated" await person_profile_1.save() - updated_person_profile_1 = await client.get(kind="ProfileTestingPerson", id=person_profile_1.id) + updated_person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) assert updated_person_profile_1.profile_name.value == "profile-one" assert updated_person_profile_1.profile_priority.value == 11 assert updated_person_profile_1.height.value == 134 + assert updated_person_profile_1.nothing.value == "profile-one nothing updated" + assert updated_person_profile_1.generic_nothing.value == "profile-one generic nothing updated" await updated_person_profile_1.related_nodes.fetch() assert len(updated_person_profile_1.related_nodes.peers) == 1 @@ -482,9 +986,9 @@ async def test_step_11_update_existing_profile( async def test_step_12_check_persons_again( self, db: InfrahubDatabase, default_branch: Branch, person_1, person_profile_1, client: InfrahubClient ): - retrieved_person_1 = await client.get(kind="TestingPerson", id=person_1.id, property=True) + retrieved_person_1 = await client.get(kind="PretendPerson", id=person_1.id, property=True) await retrieved_person_1.profiles.fetch() - retrieved_person_2 = await client.get(kind="TestingPerson", name__value="Apollo", property=True) + retrieved_person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) assert retrieved_person_1.profiles.peer_ids == [person_profile_1.id] assert retrieved_person_1.name.value == "Kara Thrace" @@ -495,6 +999,15 @@ async def test_step_12_check_persons_again( assert retrieved_person_1.height.is_from_profile is False assert retrieved_person_1.height.source is None assert retrieved_person_1.height.is_default is False + assert retrieved_person_1.nothing.value == "profile-one nothing updated" + assert retrieved_person_1.nothing.is_from_profile is True + assert retrieved_person_1.nothing.source.id == person_profile_1.id + assert retrieved_person_1.nothing.is_default is False + assert retrieved_person_1.generic_nothing.value == "profile-one generic nothing updated" + assert retrieved_person_1.generic_nothing.is_from_profile is True + assert retrieved_person_1.generic_nothing.source.id == person_profile_1.id + assert retrieved_person_1.generic_nothing.is_default is False + assert retrieved_person_2.profiles.peer_ids == [] assert retrieved_person_2.name.value == "Apollo" assert retrieved_person_2.name.is_from_profile is False @@ -504,6 +1017,14 @@ async def test_step_12_check_persons_again( assert retrieved_person_2.height.is_from_profile is False assert retrieved_person_2.height.source is None assert retrieved_person_2.height.is_default is True + assert retrieved_person_2.nothing.value is None + assert 
retrieved_person_2.nothing.is_from_profile is False + assert retrieved_person_2.nothing.source is None + assert retrieved_person_2.nothing.is_default is True + assert retrieved_person_2.generic_nothing.value is None + assert retrieved_person_2.generic_nothing.is_from_profile is False + assert retrieved_person_2.generic_nothing.source is None + assert retrieved_person_2.generic_nothing.is_default is True async def test_step_13_update_existing_profile_related_nodes( self, @@ -513,24 +1034,24 @@ async def test_step_13_update_existing_profile_related_nodes( person_1, client: InfrahubClient, ): - person_2 = await client.get(kind="TestingPerson", name__value="Apollo", property=True) - person_profile_1 = await client.get(kind="ProfileTestingPerson", id=person_profile_1.id) + person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) + person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) await person_profile_1.related_nodes.fetch() person_profile_1.related_nodes.remove(person_1.id) person_profile_1.related_nodes.add(person_2) await person_profile_1.save() - updated_person_profile_1 = await client.get(kind="ProfileTestingPerson", id=person_profile_1.id) + updated_person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) assert updated_person_profile_1.profile_name.value == "profile-one" await updated_person_profile_1.related_nodes.fetch() assert len(updated_person_profile_1.related_nodes.peers) == 1 assert updated_person_profile_1.related_nodes.peers[0].id == person_2.id async def test_step_14_check_persons_again( - self, db: InfrahubDatabase, default_branch: Branch, person_1, person_profile_1, client: InfrahubClient + self, default_branch: Branch, person_1, person_profile_1, client: InfrahubClient ): - retrieved_person_1 = await client.get(kind="TestingPerson", id=person_1.id, property=True) - retrieved_person_2 = await client.get(kind="TestingPerson", name__value="Apollo", property=True) + retrieved_person_1 = await client.get(kind="PretendPerson", id=person_1.id, property=True) + retrieved_person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) await retrieved_person_1.profiles.fetch() assert retrieved_person_1.profiles.peer_ids == [] @@ -538,10 +1059,46 @@ async def test_step_14_check_persons_again( assert retrieved_person_1.name.is_from_profile is False assert retrieved_person_1.name.source is None assert retrieved_person_1.name.is_default is False + assert retrieved_person_1.weight.value == 70 + assert retrieved_person_1.weight.is_from_profile is False + assert retrieved_person_1.weight.source is None + assert retrieved_person_1.weight.is_default is False assert retrieved_person_1.height.value == 145 assert retrieved_person_1.height.is_from_profile is False assert retrieved_person_1.height.source is None assert retrieved_person_1.height.is_default is False + assert retrieved_person_1.eye_color.value is None + assert retrieved_person_1.eye_color.is_from_profile is False + assert retrieved_person_1.eye_color.source is None + assert retrieved_person_1.eye_color.is_default is True + assert retrieved_person_1.description.value == "placeholder" + assert retrieved_person_1.description.is_from_profile is False + assert retrieved_person_1.description.source is None + assert retrieved_person_1.description.is_default is True + assert retrieved_person_1.nothing.value is None + assert retrieved_person_1.nothing.is_from_profile is False + assert retrieved_person_1.nothing.source is 
None + assert retrieved_person_1.nothing.is_default is True + assert retrieved_person_1.size.value == "human" + assert retrieved_person_1.size.is_from_profile is False + assert retrieved_person_1.size.source is None + assert retrieved_person_1.size.is_default is False + assert retrieved_person_1.shape.value == "symmetrical bilateral" + assert retrieved_person_1.shape.is_from_profile is False + assert retrieved_person_1.shape.source is None + assert retrieved_person_1.shape.is_default is False + assert retrieved_person_1.is_alive.value is None + assert retrieved_person_1.is_alive.is_from_profile is False + assert retrieved_person_1.is_alive.source is None + assert retrieved_person_1.is_alive.is_default is True + assert retrieved_person_1.lifespan.value is None + assert retrieved_person_1.lifespan.is_from_profile is False + assert retrieved_person_1.lifespan.source is None + assert retrieved_person_1.lifespan.is_default is True + assert retrieved_person_1.generic_nothing.value is None + assert retrieved_person_1.generic_nothing.is_from_profile is False + assert retrieved_person_1.generic_nothing.source is None + assert retrieved_person_1.generic_nothing.is_default is True await retrieved_person_2.profiles.fetch() assert retrieved_person_2.profiles.peer_ids == [person_profile_1.id] @@ -549,7 +1106,434 @@ async def test_step_14_check_persons_again( assert retrieved_person_2.name.is_from_profile is False assert retrieved_person_2.name.source is None assert retrieved_person_2.name.is_default is False + assert retrieved_person_2.weight.value == 85 + assert retrieved_person_2.weight.is_from_profile is False + assert retrieved_person_2.weight.source is None + assert retrieved_person_2.weight.is_default is False + assert retrieved_person_2.height.value == 134 + assert retrieved_person_2.height.is_from_profile is True + assert retrieved_person_2.height.source.id == person_profile_1.id + assert retrieved_person_2.height.is_default is False + assert retrieved_person_2.eye_color.value is None + assert retrieved_person_2.eye_color.is_from_profile is False + assert retrieved_person_2.eye_color.source is None + assert retrieved_person_2.eye_color.is_default is True + assert retrieved_person_2.description.value == "profile-one description" + assert retrieved_person_2.description.is_from_profile is True + assert retrieved_person_2.description.source.id == person_profile_1.id + assert retrieved_person_2.description.is_default is False + assert retrieved_person_2.nothing.value == "profile-one nothing updated" + assert retrieved_person_2.nothing.is_from_profile is True + assert retrieved_person_2.nothing.source.id == person_profile_1.id + assert retrieved_person_2.nothing.is_default is False + assert retrieved_person_2.size.value == "bigger human" + assert retrieved_person_2.size.is_from_profile is False + assert retrieved_person_2.size.source is None + assert retrieved_person_2.size.is_default is False + assert retrieved_person_2.shape.value == "bipedal" + assert retrieved_person_2.shape.is_from_profile is False + assert retrieved_person_2.shape.source is None + assert retrieved_person_2.shape.is_default is False + assert retrieved_person_2.is_alive.value is None + assert retrieved_person_2.is_alive.is_from_profile is False + assert retrieved_person_2.is_alive.source is None + assert retrieved_person_2.is_alive.is_default is True + assert retrieved_person_2.lifespan.value == 85 + assert retrieved_person_2.lifespan.is_from_profile is True + assert retrieved_person_2.lifespan.source.id == person_profile_1.id + 
assert retrieved_person_2.lifespan.is_default is False + assert retrieved_person_2.generic_nothing.value == "profile-one generic nothing updated" + assert retrieved_person_2.generic_nothing.is_from_profile is True + assert retrieved_person_2.generic_nothing.source.id == person_profile_1.id + assert retrieved_person_2.generic_nothing.is_default is False + + async def test_step_15_schema_update_add_attributes( + self, + default_branch, + person_profile_1: Node, + lifeform_profile_1: Node, + schema_root_02, + client: InfrahubClient, + ): + updated_person_profile_schema = await client.schema.get( + kind="ProfilePretendPerson", branch=default_branch.name, refresh=True + ) + assert set(updated_person_profile_schema.attribute_names) == { + "age", + "description", + "eye_color", + "generic_nothing", + "height", + "is_alive", + "lifespan", + "nothing", + "profile_name", + "profile_priority", + "shape", + "size", + "value", + "weight", + } + updated_lifeform_profile_schema = await client.schema.get( + kind="ProfilePretendLifeform", branch=default_branch.name, refresh=True + ) + assert set(updated_lifeform_profile_schema.attribute_names) == { + "size", + "shape", + "is_alive", + "lifespan", + "generic_nothing", + "profile_name", + "profile_priority", + "value", + } + + updated_person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) + assert updated_person_profile_1.height.value == 134 + assert updated_person_profile_1.description.value == "profile-one description" + assert updated_person_profile_1.nothing.value == "profile-one nothing updated" + assert updated_person_profile_1.age.value is None + assert updated_person_profile_1.weight.value is None + assert updated_person_profile_1.eye_color.value is None + assert updated_person_profile_1.size.value is None + assert updated_person_profile_1.is_alive.value is None + assert updated_person_profile_1.value.value is None + with pytest.raises(AttributeError): + _ = updated_person_profile_1.not_for_profiles + with pytest.raises(AttributeError): + _ = updated_person_profile_1.generic_not_for_profiles + + updated_lifeform_profile_1 = await client.get(kind="ProfilePretendLifeform", id=lifeform_profile_1.id) + assert updated_lifeform_profile_1.size.value is None + assert updated_lifeform_profile_1.shape.value == "lifeform-profile-one shape" + assert updated_lifeform_profile_1.is_alive.value is None + assert updated_lifeform_profile_1.lifespan.value is None + assert updated_lifeform_profile_1.generic_nothing.value == "lifeform-profile-one generic nothing" + assert updated_lifeform_profile_1.value.value is None + with pytest.raises(AttributeError): + _ = updated_lifeform_profile_1.generic_not_for_profiles + + async def test_step_16_update_profile_with_new_attribute( + self, + default_branch, + person_profile_1: Node, + lifeform_profile_1: Node, + client: InfrahubClient, + ): + updated_person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) + updated_person_profile_1.age.value = 25 + updated_person_profile_1.eye_color.value = "blurple" + updated_person_profile_1.value.value = 42 + updated_person_profile_1.is_alive.value = True + await updated_person_profile_1.save() + + updated_lifeform_profile_1 = await client.get(kind="ProfilePretendLifeform", id=lifeform_profile_1.id) + updated_lifeform_profile_1.size.value = "lifeform-profile-big" + updated_lifeform_profile_1.value.value = 84 + await updated_lifeform_profile_1.save() + + async def test_step_17_use_generic_profile( + self, + client: 
InfrahubClient, + person_profile_1: Node, + lifeform_profile_1: Node, + ): + person_two = await client.get(kind="PretendPerson", name__value="Apollo") + await person_two.profiles.fetch() + person_two.profiles.add(lifeform_profile_1.id) + await person_two.save() + + async def test_step_17_check_persons_again( + self, default_branch: Branch, person_1, person_profile_1: Node, lifeform_profile_1: Node, client: InfrahubClient + ): + retrieved_person_1 = await client.get(kind="PretendPerson", id=person_1.id, property=True) + retrieved_person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) + + await retrieved_person_1.profiles.fetch() + assert retrieved_person_1.profiles.peer_ids == [] + assert retrieved_person_1.name.value == "Kara Thrace" + assert retrieved_person_1.name.is_from_profile is False + assert retrieved_person_1.name.source is None + assert retrieved_person_1.name.is_default is False + assert retrieved_person_1.height.value == 145 + assert retrieved_person_1.height.is_from_profile is False + assert retrieved_person_1.height.source is None + assert retrieved_person_1.height.is_default is False + assert retrieved_person_1.weight.value == 70 + assert retrieved_person_1.weight.is_from_profile is False + assert retrieved_person_1.weight.source is None + assert retrieved_person_1.weight.is_default is False + assert retrieved_person_1.eye_color.value is None + assert retrieved_person_1.eye_color.is_from_profile is False + assert retrieved_person_1.eye_color.source is None + assert retrieved_person_1.eye_color.is_default is True + assert retrieved_person_1.description.value == "placeholder" + assert retrieved_person_1.description.is_from_profile is False + assert retrieved_person_1.description.source is None + assert retrieved_person_1.description.is_default is True + assert retrieved_person_1.nothing.value is None + assert retrieved_person_1.nothing.is_from_profile is False + assert retrieved_person_1.nothing.source is None + assert retrieved_person_1.nothing.is_default is True + assert retrieved_person_1.age.value is None + assert retrieved_person_1.age.is_from_profile is False + assert retrieved_person_1.age.source is None + assert retrieved_person_1.age.is_default is True + assert retrieved_person_1.size.value == "human" + assert retrieved_person_1.size.is_from_profile is False + assert retrieved_person_1.size.source is None + assert retrieved_person_1.size.is_default is False + assert retrieved_person_1.shape.value == "symmetrical bilateral" + assert retrieved_person_1.shape.is_from_profile is False + assert retrieved_person_1.shape.source is None + assert retrieved_person_1.shape.is_default is False + assert retrieved_person_1.is_alive.value is None + assert retrieved_person_1.is_alive.is_from_profile is False + assert retrieved_person_1.is_alive.source is None + assert retrieved_person_1.is_alive.is_default is True + assert retrieved_person_1.lifespan.value is None + assert retrieved_person_1.lifespan.is_from_profile is False + assert retrieved_person_1.lifespan.source is None + assert retrieved_person_1.lifespan.is_default is True + assert retrieved_person_1.generic_nothing.value is None + assert retrieved_person_1.generic_nothing.is_from_profile is False + assert retrieved_person_1.generic_nothing.source is None + assert retrieved_person_1.generic_nothing.is_default is True + assert retrieved_person_1.value.value is None + assert retrieved_person_1.value.is_from_profile is False + assert retrieved_person_1.value.source is None + assert 
retrieved_person_1.value.is_default is True + + await retrieved_person_2.profiles.fetch() + assert set(retrieved_person_2.profiles.peer_ids) == {person_profile_1.id, lifeform_profile_1.id} + assert retrieved_person_2.name.value == "Apollo" + assert retrieved_person_2.name.is_from_profile is False + assert retrieved_person_2.name.source is None + assert retrieved_person_2.name.is_default is False + assert retrieved_person_2.height.value == 134 + assert retrieved_person_2.height.is_from_profile is True + assert retrieved_person_2.height.source.id == person_profile_1.id + assert retrieved_person_2.height.is_default is False + assert retrieved_person_2.weight.value == 85 + assert retrieved_person_2.weight.is_from_profile is False + assert retrieved_person_2.weight.source is None + assert retrieved_person_2.weight.is_default is False + assert retrieved_person_2.eye_color.value == "blurple" + assert retrieved_person_2.eye_color.is_from_profile is True + assert retrieved_person_2.eye_color.source.id == person_profile_1.id + assert retrieved_person_2.eye_color.is_default is False + assert retrieved_person_2.description.value == "profile-one description" + assert retrieved_person_2.description.is_from_profile is True + assert retrieved_person_2.description.source.id == person_profile_1.id + assert retrieved_person_2.description.is_default is False + assert retrieved_person_2.nothing.value == "profile-one nothing updated" + assert retrieved_person_2.nothing.is_from_profile is True + assert retrieved_person_2.nothing.source.id == person_profile_1.id + assert retrieved_person_2.nothing.is_default is False + assert retrieved_person_2.age.value == 25 + assert retrieved_person_2.age.is_from_profile is True + assert retrieved_person_2.age.source.id == person_profile_1.id + assert retrieved_person_2.age.is_default is False + assert retrieved_person_2.size.value == "bigger human" + assert retrieved_person_2.size.is_from_profile is False + assert retrieved_person_2.size.source is None + assert retrieved_person_2.size.is_default is False + assert retrieved_person_2.shape.value == "bipedal" + assert retrieved_person_2.shape.is_from_profile is False + assert retrieved_person_2.shape.source is None + assert retrieved_person_2.shape.is_default is False + assert retrieved_person_2.is_alive.value is True + assert retrieved_person_2.is_alive.is_from_profile is True + assert retrieved_person_2.is_alive.source.id == person_profile_1.id + assert retrieved_person_2.is_alive.is_default is False + assert retrieved_person_2.lifespan.value == 85 + assert retrieved_person_2.lifespan.is_from_profile is True + assert retrieved_person_2.lifespan.source.id == person_profile_1.id + assert retrieved_person_2.lifespan.is_default is False + assert retrieved_person_2.generic_nothing.value == "lifeform-profile-one generic nothing" + assert retrieved_person_2.generic_nothing.is_from_profile is True + assert retrieved_person_2.generic_nothing.source.id == lifeform_profile_1.id + assert retrieved_person_2.generic_nothing.is_default is False + assert retrieved_person_2.value.value == 84 + assert retrieved_person_2.value.is_from_profile is True + assert retrieved_person_2.value.source.id == lifeform_profile_1.id + assert retrieved_person_2.value.is_default is False + + async def test_step_18_check_profile_for_removed_attribute( + self, + db: InfrahubDatabase, + schema_root_03, + default_branch, + person_profile_1, + lifeform_profile_1, + client: InfrahubClient, + ): + updated_schema = await client.schema.get(kind="ProfilePretendPerson", 
branch=default_branch.name, refresh=True) + assert set(updated_schema.attribute_names) == { + "age", + "eye_color", + "is_alive", + "profile_name", + "profile_priority", + "size", + "value", + "weight", + } + updated_lifeform_profile_schema = await client.schema.get( + kind="ProfilePretendLifeform", branch=default_branch.name, refresh=True + ) + assert set(updated_lifeform_profile_schema.attribute_names) == { + "size", + "is_alive", + "profile_name", + "profile_priority", + "value", + } + + person_profile_1 = await client.get(kind="ProfilePretendPerson", id=person_profile_1.id) + assert person_profile_1.age.value == 25 + assert person_profile_1.eye_color.value == "blurple" + assert person_profile_1.is_alive.value is True + assert person_profile_1.size.value is None + assert person_profile_1.value.value == 42 + assert person_profile_1.weight.value is None + + with pytest.raises(AttributeError): + _ = person_profile_1.height + with pytest.raises(AttributeError): + _ = person_profile_1.description + with pytest.raises(AttributeError): + _ = person_profile_1.nothing + with pytest.raises(AttributeError): + _ = person_profile_1.shape + with pytest.raises(AttributeError): + _ = person_profile_1.lifespan + with pytest.raises(AttributeError): + _ = person_profile_1.generic_nothing + + lifeform_profile_1 = await client.get(kind="ProfilePretendLifeform", id=lifeform_profile_1.id) + assert lifeform_profile_1.size.value == "lifeform-profile-big" + assert lifeform_profile_1.is_alive.value is None + assert lifeform_profile_1.value.value == 84 + + with pytest.raises(AttributeError): + _ = lifeform_profile_1.shape + with pytest.raises(AttributeError): + _ = lifeform_profile_1.lifespan + with pytest.raises(AttributeError): + _ = lifeform_profile_1.generic_nothing + + async def test_step_19_check_persons_again( + self, + db: InfrahubDatabase, + default_branch: Branch, + person_1, + person_profile_1, + lifeform_profile_1, + client: InfrahubClient, + ): + await client.schema.get(kind="PretendPerson", branch=default_branch.name, refresh=True) + retrieved_person_1 = await client.get(kind="PretendPerson", id=person_1.id, property=True) + retrieved_person_2 = await client.get(kind="PretendPerson", name__value="Apollo", property=True) + + await retrieved_person_1.profiles.fetch() + assert retrieved_person_1.profiles.peer_ids == [] + assert retrieved_person_1.name.value == "Kara Thrace" + assert retrieved_person_1.name.is_from_profile is False + assert retrieved_person_1.name.source is None + assert retrieved_person_1.name.is_default is False + assert retrieved_person_1.age.value is None + assert retrieved_person_1.age.is_from_profile is False + assert retrieved_person_1.age.source is None + assert retrieved_person_1.age.is_default is True + assert retrieved_person_1.height.value == 145 + assert retrieved_person_1.height.is_from_profile is False + assert retrieved_person_1.height.source is None + assert retrieved_person_1.height.is_default is False + assert retrieved_person_1.weight.value == 70 + assert retrieved_person_1.weight.is_from_profile is False + assert retrieved_person_1.weight.source is None + assert retrieved_person_1.weight.is_default is False + assert retrieved_person_1.eye_color.value is None + assert retrieved_person_1.eye_color.is_from_profile is False + assert retrieved_person_1.eye_color.source is None + assert retrieved_person_1.eye_color.is_default is True + assert retrieved_person_1.description.value == "placeholder" + assert retrieved_person_1.description.is_from_profile is False + assert 
retrieved_person_1.description.source is None + assert retrieved_person_1.description.is_default is True + assert retrieved_person_1.size.value == "human" + assert retrieved_person_1.size.is_from_profile is False + assert retrieved_person_1.size.source is None + assert retrieved_person_1.size.is_default is False + assert retrieved_person_1.shape.value == "symmetrical bilateral" + assert retrieved_person_1.shape.is_from_profile is False + assert retrieved_person_1.shape.source is None + assert retrieved_person_1.shape.is_default is False + assert retrieved_person_1.is_alive.value is None + assert retrieved_person_1.is_alive.is_from_profile is False + assert retrieved_person_1.is_alive.source is None + assert retrieved_person_1.is_alive.is_default is True + assert retrieved_person_1.lifespan.value is None + assert retrieved_person_1.lifespan.is_from_profile is False + assert retrieved_person_1.lifespan.source is None + assert retrieved_person_1.lifespan.is_default is True + assert retrieved_person_1.value.value is None + assert retrieved_person_1.value.is_from_profile is False + assert retrieved_person_1.value.source is None + assert retrieved_person_1.value.is_default is True + with pytest.raises(AttributeError): + _ = retrieved_person_1.nothing + with pytest.raises(AttributeError): + _ = retrieved_person_1.generic_nothing + + await retrieved_person_2.profiles.fetch() + assert set(retrieved_person_2.profiles.peer_ids) == {person_profile_1.id, lifeform_profile_1.id} + assert retrieved_person_2.name.value == "Apollo" + assert retrieved_person_2.name.is_from_profile is False + assert retrieved_person_2.name.source is None + assert retrieved_person_2.name.is_default is False + assert retrieved_person_2.age.value == 25 + assert retrieved_person_2.age.is_from_profile is True + assert retrieved_person_2.age.source.id == person_profile_1.id + assert retrieved_person_2.age.is_default is False assert retrieved_person_2.height.value == 134 assert retrieved_person_2.height.is_from_profile is True assert retrieved_person_2.height.source.id == person_profile_1.id assert retrieved_person_2.height.is_default is False + assert retrieved_person_2.weight.value == 85 + assert retrieved_person_2.weight.is_from_profile is False + assert retrieved_person_2.weight.source is None + assert retrieved_person_2.weight.is_default is False + assert retrieved_person_2.eye_color.value == "blurple" + assert retrieved_person_2.eye_color.is_from_profile is True + assert retrieved_person_2.eye_color.source.id == person_profile_1.id + assert retrieved_person_2.eye_color.is_default is False + assert retrieved_person_2.description.value == "profile-one description" + assert retrieved_person_2.description.is_from_profile is True + assert retrieved_person_2.description.source.id == person_profile_1.id + assert retrieved_person_2.description.is_default is False + assert retrieved_person_2.size.value == "bigger human" + assert retrieved_person_2.size.is_from_profile is False + assert retrieved_person_2.size.source is None + assert retrieved_person_2.size.is_default is False + assert retrieved_person_2.shape.value == "bipedal" + assert retrieved_person_2.shape.is_from_profile is False + assert retrieved_person_2.shape.source is None + assert retrieved_person_2.shape.is_default is False + assert retrieved_person_2.is_alive.value is True + assert retrieved_person_2.is_alive.is_from_profile is True + assert retrieved_person_2.is_alive.source.id == person_profile_1.id + assert retrieved_person_2.is_alive.is_default is False + assert 
retrieved_person_2.lifespan.value == 85 + assert retrieved_person_2.lifespan.is_from_profile is True + assert retrieved_person_2.lifespan.source.id == person_profile_1.id + assert retrieved_person_2.lifespan.is_default is False + assert retrieved_person_2.value.value == 84 + assert retrieved_person_2.value.is_from_profile is True + assert retrieved_person_2.value.source.id == lifeform_profile_1.id + assert retrieved_person_2.value.is_default is False + with pytest.raises(AttributeError): + _ = retrieved_person_2.nothing + with pytest.raises(AttributeError): + _ = retrieved_person_2.generic_nothing diff --git a/backend/tests/integration/schema_lifecycle/test_generic_migrations.py b/backend/tests/integration/schema_lifecycle/test_generic_migrations.py index 7002fbd034..90f5111c77 100644 --- a/backend/tests/integration/schema_lifecycle/test_generic_migrations.py +++ b/backend/tests/integration/schema_lifecycle/test_generic_migrations.py @@ -13,6 +13,7 @@ from infrahub.core.schema.node_schema import NodeSchema from infrahub.database import InfrahubDatabase from infrahub.database.validation import verify_no_duplicate_relationships, verify_no_edges_added_after_node_delete +from tests.helpers.db_validation import validate_no_duplicate_attributes from ..shared import load_schema from .shared import TestSchemaLifecycleBase @@ -217,6 +218,14 @@ async def branch(self, request, db: InfrahubDatabase, default_branch: Branch, br return default_branch return await registry.get_branch(db=db, branch=branch_name) + @pytest.fixture(scope="class") + def schema_generic_with_new_fields(self, schema_generic_base: dict[str, Any]) -> dict[str, Any]: + schema_dict = schema_generic_base.copy() + schema_dict["attributes"].append( + {"name": "generic_attr_text_new", "kind": "Text", "optional": True}, + ) + return schema_dict + @pytest.fixture(scope="class") def schema_specific_one_with_overrides(self, schema_specific_one_base: dict[str, Any]) -> dict[str, Any]: schema_dict = schema_specific_one_base.copy() @@ -284,7 +293,7 @@ def schema_specific_three_with_overrides(self, schema_specific_three_base: dict[ @pytest.fixture(scope="class") def schema_step_02( self, - schema_generic_base, + schema_generic_with_new_fields, schema_specific_one_with_overrides, schema_specific_two_with_new_fields, schema_specific_three_with_overrides, @@ -292,7 +301,7 @@ def schema_step_02( ) -> dict[str, Any]: return { "version": "1.0", - "generics": [schema_generic_base], + "generics": [schema_generic_with_new_fields], "nodes": [ schema_specific_one_with_overrides, schema_specific_two_with_new_fields, @@ -312,7 +321,6 @@ def schema_specific_three_with_deleted_override( @pytest.fixture(scope="class") def schema_step_03( self, - schema_generic_base, schema_specific_one_with_overrides, schema_specific_two_with_new_fields, schema_specific_three_with_deleted_override, @@ -320,7 +328,6 @@ def schema_step_03( ) -> dict[str, Any]: return { "version": "1.0", - "generics": [schema_generic_base], "nodes": [ schema_specific_one_with_overrides, schema_specific_two_with_new_fields, @@ -331,9 +338,9 @@ def schema_step_03( @pytest.fixture(scope="class") def schema_generic_with_weight_updates( - self, db: InfrahubDatabase, schema_generic_base: dict[str, Any] + self, db: InfrahubDatabase, schema_generic_with_new_fields: dict[str, Any] ) -> dict[str, Any]: - schema_dict = schema_generic_base.copy() + schema_dict = schema_generic_with_new_fields.copy() for attr in schema_dict["attributes"]: if "order_weight" in attr: attr["order_weight"] += 1 @@ -481,11 +488,26 
@@ async def test_step02_check_add_specific_overrides( "diff": { "added": {}, "changed": { + GENERIC_KIND: { + "added": {}, + "changed": { + "attributes": { + "added": { + "generic_attr_text_new": None, + }, + "changed": {}, + "removed": {}, + }, + }, + "removed": {}, + }, SPECIFIC_ONE_KIND: { "added": {}, "changed": { "attributes": { - "added": {}, + "added": { + "generic_attr_text_new": None, + }, "changed": { "generic_attr_text": { "added": {}, @@ -531,6 +553,7 @@ async def test_step02_check_add_specific_overrides( "attributes": { "added": { "specific_attr_text": None, + "generic_attr_text_new": None, }, "changed": {}, "removed": {}, @@ -551,6 +574,7 @@ async def test_step02_check_add_specific_overrides( "attributes": { "added": { "specific_attr_num": None, + "generic_attr_text_new": None, }, "changed": { "generic_attr_text": { @@ -636,13 +660,19 @@ async def test_step02_load_schema_with_overrides( updated_schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch) generic_schema = updated_schema_branch.get(GENERIC_KIND, duplicate=False) - assert set(generic_schema.attribute_names) == {"generic_attr_text", "generic_attr_num", "generic_required_attr"} + assert set(generic_schema.attribute_names) == { + "generic_attr_text", + "generic_attr_text_new", + "generic_attr_num", + "generic_required_attr", + } assert set(generic_schema.relationship_names) >= {"things", "favorite_thing"} generic_attr_text_schema = generic_schema.get_attribute("generic_attr_text") assert generic_attr_text_schema.default_value is None specific_one_schema = updated_schema_branch.get(SPECIFIC_ONE_KIND, duplicate=False) assert set(specific_one_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", } @@ -660,6 +690,7 @@ async def test_step02_load_schema_with_overrides( specific_two_schema = updated_schema_branch.get(SPECIFIC_TWO_KIND, duplicate=False) assert set(specific_two_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", "specific_attr_text", @@ -670,6 +701,7 @@ async def test_step02_load_schema_with_overrides( specific_three_schema = updated_schema_branch.get(SPECIFIC_THREE_KIND, duplicate=False) assert set(specific_three_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", "specific_attr_num", @@ -698,7 +730,7 @@ async def test_step02_load_schema_with_overrides( async def _finalize_deleted_fields(self, db: InfrahubDatabase, branch: Branch, full_schema_dict: dict[str, Any]): current_schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch) - for schema_dict in full_schema_dict["generics"] + full_schema_dict["nodes"]: + for schema_dict in full_schema_dict.get("generics", []) + full_schema_dict.get("nodes", []): for attr in schema_dict.get("attributes", []): if attr.get("state") == HashableModelState.ABSENT.value: schema = current_schema_branch.get( @@ -802,11 +834,17 @@ async def test_step03_load_schema_with_deleted_override( updated_schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch) generic_schema = updated_schema_branch.get(GENERIC_KIND, duplicate=False) - assert set(generic_schema.attribute_names) == {"generic_attr_num", "generic_attr_text", "generic_required_attr"} + assert set(generic_schema.attribute_names) == { + "generic_attr_num", + "generic_attr_text", + "generic_attr_text_new", + "generic_required_attr", + } assert 
set(generic_schema.relationship_names) >= {"things", "favorite_thing"} specific_one_schema = updated_schema_branch.get(SPECIFIC_ONE_KIND, duplicate=False) assert set(specific_one_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", } @@ -815,6 +853,7 @@ async def test_step03_load_schema_with_deleted_override( specific_two_schema = updated_schema_branch.get(SPECIFIC_TWO_KIND, duplicate=False) assert set(specific_two_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", "specific_attr_text", @@ -825,6 +864,7 @@ async def test_step03_load_schema_with_deleted_override( specific_three_schema = updated_schema_branch.get(SPECIFIC_THREE_KIND, duplicate=False) assert set(specific_three_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", "specific_attr_num", @@ -948,7 +988,12 @@ async def test_step04_load_schema_with_updated_generic_weight( updated_schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch) generic_schema = updated_schema_branch.get(GENERIC_KIND, duplicate=False) - assert set(generic_schema.attribute_names) == {"generic_attr_num", "generic_attr_text", "generic_required_attr"} + assert set(generic_schema.attribute_names) == { + "generic_attr_num", + "generic_attr_text", + "generic_attr_text_new", + "generic_required_attr", + } assert set(generic_schema.relationship_names) >= {"things", "favorite_thing"} weights_by_field_name = { field.name: field.order_weight for field in generic_schema.attributes + generic_schema.relationships @@ -960,6 +1005,7 @@ async def test_step04_load_schema_with_updated_generic_weight( specific_one_schema = updated_schema_branch.get(SPECIFIC_ONE_KIND, duplicate=False) assert set(specific_one_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", } @@ -978,6 +1024,7 @@ async def test_step04_load_schema_with_updated_generic_weight( specific_two_schema = updated_schema_branch.get(SPECIFIC_TWO_KIND, duplicate=False) assert set(specific_two_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", "specific_attr_text", @@ -996,6 +1043,7 @@ async def test_step04_load_schema_with_updated_generic_weight( specific_three_schema = updated_schema_branch.get(SPECIFIC_THREE_KIND, duplicate=False) assert set(specific_three_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", "specific_attr_num", @@ -1199,12 +1247,13 @@ async def test_step05_load_schema_with_generic_deletes( updated_schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch) generic_schema = updated_schema_branch.get(GENERIC_KIND, duplicate=False) - assert set(generic_schema.attribute_names) == {"generic_attr_num"} + assert set(generic_schema.attribute_names) == {"generic_attr_num", "generic_attr_text_new"} assert "things" not in generic_schema.relationship_names assert "favorite_thing" in generic_schema.relationship_names specific_one_schema = updated_schema_branch.get(SPECIFIC_ONE_KIND, duplicate=False) assert set(specific_one_schema.attribute_names) == { "generic_attr_text", + "generic_attr_text_new", "generic_attr_num", "generic_required_attr", } @@ -1218,7 +1267,11 @@ async def test_step05_load_schema_with_generic_deletes( assert 
overridden_things_rel_schema.max_count == 3 assert overridden_things_rel_schema.order_weight == 3011 specific_two_schema = updated_schema_branch.get(SPECIFIC_TWO_KIND, duplicate=False) - assert set(specific_two_schema.attribute_names) == {"generic_attr_num", "specific_attr_text"} + assert set(specific_two_schema.attribute_names) == { + "generic_attr_text_new", + "generic_attr_num", + "specific_attr_text", + } assert set(specific_two_schema.local_attribute_names) == {"specific_attr_text"} assert set(specific_two_schema.relationship_names) >= {"favorite_thing", "specific_things"} assert "things" not in specific_two_schema.relationship_names @@ -1226,6 +1279,7 @@ async def test_step05_load_schema_with_generic_deletes( specific_three_schema = updated_schema_branch.get(SPECIFIC_THREE_KIND, duplicate=False) assert set(specific_three_schema.attribute_names) == { "generic_attr_num", + "generic_attr_text_new", "specific_attr_num", "generic_required_attr", } @@ -1324,22 +1378,31 @@ async def test_step06_load_schema_with_override_deletes( updated_schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch) generic_schema = updated_schema_branch.get(GENERIC_KIND, duplicate=False) - assert set(generic_schema.attribute_names) == {"generic_attr_num"} + assert set(generic_schema.attribute_names) == {"generic_attr_num", "generic_attr_text_new"} assert "things" not in generic_schema.relationship_names assert "favorite_thing" in generic_schema.relationship_names specific_one_schema = updated_schema_branch.get(SPECIFIC_ONE_KIND, duplicate=False) - assert set(specific_one_schema.attribute_names) == {"generic_attr_num", "generic_required_attr"} + assert set(specific_one_schema.attribute_names) == { + "generic_attr_text_new", + "generic_attr_num", + "generic_required_attr", + } assert set(specific_one_schema.local_attribute_names) == {"generic_required_attr"} assert "things" not in specific_one_schema.relationship_names assert "favorite_thing" in specific_one_schema.relationship_names specific_two_schema = updated_schema_branch.get(SPECIFIC_TWO_KIND, duplicate=False) - assert set(specific_two_schema.attribute_names) == {"generic_attr_num", "specific_attr_text"} + assert set(specific_two_schema.attribute_names) == { + "generic_attr_text_new", + "generic_attr_num", + "specific_attr_text", + } assert set(specific_two_schema.local_attribute_names) == {"specific_attr_text"} assert set(specific_two_schema.relationship_names) >= {"favorite_thing", "specific_things"} assert "things" not in specific_two_schema.relationship_names assert set(specific_two_schema.local_relationship_names) >= {"specific_things"} specific_three_schema = updated_schema_branch.get(SPECIFIC_THREE_KIND, duplicate=False) assert set(specific_three_schema.attribute_names) == { + "generic_attr_text_new", "generic_attr_num", "specific_attr_num", "generic_required_attr", @@ -1368,7 +1431,11 @@ class TestSchemaLifecycleGenericUpdates(SchemaLifecycleGenericBase): async def validate_database( self, db: InfrahubDatabase, branch: Branch, inheriting_schemas: list[NodeSchema] ) -> list[str]: - return await self._validate_inherited_schema_fields(db=db, branch=branch, inheriting_schemas=inheriting_schemas) + errors = await self._validate_inherited_schema_fields( + db=db, branch=branch, inheriting_schemas=inheriting_schemas + ) + errors.extend(await validate_no_duplicate_attributes(db=db, branch=branch)) + return errors async def _validate_inherited_schema_fields( self, db: InfrahubDatabase, branch: Branch, inheriting_schemas: list[NodeSchema] 
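Both the integration tests above and the migration tests below lean on validate_no_duplicate_attributes from tests.helpers.db_validation, a helper this PR adds but whose body does not appear in this diff. As a rough illustration of the invariant it enforces — on any one branch, a node must never carry more than one active attribute with the same name — here is a minimal standalone sketch; the input shape (plain (node_id, attribute_name) pairs rather than a live graph query) and the error-message wording are assumptions for illustration, not the PR's implementation:

from collections import Counter

def find_duplicate_attributes(edges: list[tuple[str, str]]) -> list[str]:
    """Return one error per (node, attribute-name) pair that occurs more than
    once among the active HAS_ATTRIBUTE edges visible on a single branch."""
    counts = Counter(edges)
    return [
        f"node {node_id} has {count} active attributes named {attr_name!r}"
        for (node_id, attr_name), count in counts.items()
        if count > 1
    ]

# the broken state that Migration040 (m040_duplicated_attributes) cleans up
assert find_duplicate_attributes([("car-1", "smell"), ("car-1", "smell")]) == [
    "node car-1 has 2 active attributes named 'smell'"
]
# the state the tests expect once the migration has run
assert find_duplicate_attributes([("car-1", "smell"), ("car-2", "smell")]) == []

The real helper takes (db, branch) and returns a list of error strings, so it presumably performs the equivalent counting with a Cypher aggregation scoped to the edges visible on that branch.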
diff --git a/backend/tests/unit/conftest.py b/backend/tests/unit/conftest.py
index d23ded3009..f202a57b39 100644
--- a/backend/tests/unit/conftest.py
+++ b/backend/tests/unit/conftest.py
@@ -4,7 +4,7 @@
 import sys
 from itertools import islice
 from pathlib import Path
-from typing import Any
+from typing import Any, Generator
 from unittest.mock import patch
 
 import pytest
@@ -64,6 +64,7 @@
 from infrahub.database import InfrahubDatabase
 from infrahub.dependencies.registry import build_component_registry
 from infrahub.git import InfrahubRepository
+from infrahub.graphql.registry import registry as graphql_registry
 from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
 from infrahub.workers.dependencies import build_workflow
 from tests.helpers.file_repo import FileRepo
@@ -153,6 +154,14 @@ def git_repos_dir(tmp_path: Path) -> Path:
     return repos_dir
 
 
+@pytest.fixture
+def reset_graphql_schema_between_tests() -> Generator[None, None, None]:
+    """Clear the cached GraphQL schema before and after a test; useful with GraphQL enums, since the generated schema looks completely different when enums are enabled."""
+    graphql_registry.clear_cache()
+    yield
+    graphql_registry.clear_cache()
+
+
 @pytest.fixture
 async def git_fixture_repo(git_sources_dir: Path, git_repos_dir: Path) -> InfrahubRepository:
     FileRepo(name="test_base", sources_directory=git_sources_dir)
diff --git a/backend/tests/unit/core/migrations/graph/test_040.py b/backend/tests/unit/core/migrations/graph/test_040.py
index e3a95a77b4..059d9bb813 100644
--- a/backend/tests/unit/core/migrations/graph/test_040.py
+++ b/backend/tests/unit/core/migrations/graph/test_040.py
@@ -1,407 +1,101 @@
-from dataclasses import dataclass
-from typing import Any
-from unittest.mock import AsyncMock
-
-import pytest
-
-from infrahub.core.branch import Branch
+from infrahub.core import registry
+from infrahub.core.branch.models import Branch
+from infrahub.core.constants import BranchSupportType, SchemaPathType
 from infrahub.core.initialization import create_branch
 from infrahub.core.manager import NodeManager
-from infrahub.core.migrations.graph.m040_profile_attrs_in_db import Migration040
+from infrahub.core.migrations.graph.m040_duplicated_attributes import Migration040
+from infrahub.core.migrations.schema.models import SchemaApplyMigrationData
+from infrahub.core.migrations.schema.tasks import schema_apply_migrations
+from infrahub.core.models import SchemaUpdateMigrationInfo
 from infrahub.core.node import Node
+from infrahub.core.path import SchemaPath
+from infrahub.core.schema.attribute_schema import AttributeSchema
+from infrahub.core.schema.schema_branch import SchemaBranch
 from infrahub.database import InfrahubDatabase
-from infrahub.profiles.node_applier import NodeProfilesApplier
-from tests.helpers.test_app import TestInfrahubApp
-
-
-@dataclass
-class AttributeProfileDetails:
-    attribute_name: str
-    value: Any
-    is_default: bool
-    source_id: str | None = None
-
-    @property
-    def is_from_profile(self) -> bool:
-        return self.source_id is not None
-
-
-class WrappedMigration040(Migration040):
-    async def _get_profile_applier(self, db: InfrahubDatabase, branch_name: str) -> NodeProfilesApplier:
-        profile_applier = await super()._get_profile_applier(db=db, branch_name=branch_name)
-        if isinstance(profile_applier, AsyncMock):
-            return profile_applier
-        wrapped_profile_applier = AsyncMock(wraps=profile_applier)
-        self._appliers_by_branch[branch_name] = wrapped_profile_applier
-        return wrapped_profile_applier
-
-
-@pytest.mark.skip(reason="Is flaky. 
And waiting on updates to the migration") -class TestMigration040(TestInfrahubApp): - @pytest.fixture - async def profile_1(self, db: InfrahubDatabase, default_branch: Branch, criticality_schema) -> Node: - profile = await Node.init(db=db, schema="ProfileTestCriticality") - await profile.new(db=db, profile_name="profile_1", is_true=True, color="profile1", profile_priority=1001) - await profile.save(db=db) - return profile - - @pytest.fixture - async def profile_2(self, db: InfrahubDatabase, default_branch: Branch, criticality_schema) -> Node: - profile = await Node.init(db=db, schema="ProfileTestCriticality") - await profile.new( - db=db, - profile_name="profile_2", - description="profile2", - is_false=False, - is_true=False, - color="profile2", - profile_priority=1002, - ) - await profile.save(db=db) - return profile - - @pytest.fixture - async def value_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: - return await create_branch(db=db, branch_name="value_branch") - - @pytest.fixture - async def profile_1_value_update(self, db: InfrahubDatabase, value_branch: Branch, profile_1: Node) -> Node: - profile = await NodeManager.get_one(db=db, branch=value_branch, id=profile_1.id) - profile.description.value = "profile1_value_update" - profile.is_true.value = False - profile.color.value = "profile1_value_update" - await profile.save(db=db) - return profile - - @pytest.fixture - async def priority_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: - return await create_branch(db=db, branch_name="priority_branch") - - @pytest.fixture - async def profile_2_priority_update(self, db: InfrahubDatabase, priority_branch: Branch, profile_2: Node) -> Node: - profile = await NodeManager.get_one(db=db, branch=priority_branch, id=profile_2.id) - profile.profile_priority.value = 999 - await profile.save(db=db) - return profile - - @pytest.fixture - async def deleted_profile_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: - return await create_branch(db=db, branch_name="deleted_profile_branch") - - @pytest.fixture - async def profile_2_deleted(self, db: InfrahubDatabase, deleted_profile_branch: Branch, profile_2: Node): - profile = await NodeManager.get_one(db=db, branch=deleted_profile_branch, id=profile_2.id) - await profile.delete(db=db) - - @pytest.fixture - async def deleted_node_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: - return await create_branch(db=db, branch_name="deleted_node_branch") - - @pytest.fixture - async def criticality_low_deleted(self, db: InfrahubDatabase, deleted_node_branch: Branch, criticality_low: Node): - profile = await NodeManager.get_one(db=db, branch=deleted_node_branch, id=criticality_low.id) - await profile.delete(db=db) - - @pytest.fixture - async def load_data( - self, - db: InfrahubDatabase, - default_branch: Branch, - criticality_low: Node, - criticality_medium: Node, - criticality_high: Node, - profile_1: Node, - profile_2: Node, - ): - crit_low = await NodeManager.get_one(db=db, id=criticality_low.id) - await crit_low.profiles.update(db=db, data=[profile_1]) - await crit_low.save(db=db) - crit_medium = await NodeManager.get_one(db=db, id=criticality_medium.id) - await crit_medium.profiles.update(db=db, data=[profile_1, profile_2]) - await crit_medium.save(db=db) - crit_high = await NodeManager.get_one(db=db, id=criticality_high.id) - await crit_high.profiles.update(db=db, data=[profile_2]) - await crit_high.save(db=db) - - @pytest.fixture - async def load_branch_data( - self, - 
value_branch: Branch, - profile_1_value_update: Node, - priority_branch: Branch, - profile_2_priority_update: Node, - deleted_profile_branch: Branch, - profile_2_deleted: Node, - deleted_node_branch: Branch, - criticality_low_deleted: Node, - ): - pass - - def validate_node( - self, - original_node: Node, - updated_node: Node, - expected_profile_attrs: list[AttributeProfileDetails], - ): - expected_profile_attrs_by_name = {attr.attribute_name: attr for attr in expected_profile_attrs} - for attribute_name in updated_node._attributes: - current_attribute = getattr(updated_node, attribute_name) - if expected_profile_attr := expected_profile_attrs_by_name.get(attribute_name): - assert current_attribute.value == expected_profile_attr.value - assert current_attribute.is_default == expected_profile_attr.is_default - assert current_attribute.is_from_profile == expected_profile_attr.is_from_profile - assert current_attribute.source_id == expected_profile_attr.source_id - continue - original_attribute = getattr(original_node, attribute_name) - assert current_attribute.value == original_attribute.value - assert current_attribute.is_default == original_attribute.is_default - assert current_attribute.is_from_profile == original_attribute.is_from_profile - assert current_attribute.source_id == original_attribute.source_id - - async def test_migration_040( +from tests.helpers.db_validation import validate_no_duplicate_attributes + + +class TestMigration040: + async def _prepare_branch(self, db: InfrahubDatabase, branch: Branch): + previous_schema_branch = registry.schema.get_schema_branch(name=branch.name) + previous_car_schema = previous_schema_branch.get_node(name="TestCar", duplicate=False) + new_car_schema = previous_car_schema.duplicate() + new_car_schema.attributes.append(AttributeSchema(name="smell", kind="Text", branch=BranchSupportType.AWARE)) + new_schema_branch = previous_schema_branch.duplicate() + new_schema_branch.set(name="TestCar", schema=new_car_schema) + + # reproduces the error state by running the same migration concurrently so that the attribute is duplicated + migration_errors = await schema_apply_migrations( + message=SchemaApplyMigrationData( + branch=branch, + previous_schema=previous_schema_branch, + new_schema=new_schema_branch, + migrations=[ + SchemaUpdateMigrationInfo( + path=SchemaPath(schema_kind="TestCar", path_type=SchemaPathType.ATTRIBUTE, field_name="smell"), + migration_name="node.attribute.add", + ) + ] + * 3, + ) + ) + assert not migration_errors + + # validate the error state + errors = await validate_no_duplicate_attributes(db=db, branch=branch) + assert errors + + registry.schema.set(name="TestCar", branch=branch.name, schema=new_car_schema) + + async def test_clean_duplicated_attributes( self, db: InfrahubDatabase, default_branch: Branch, - criticality_low: Node, - criticality_medium: Node, - criticality_high: Node, - profile_1: Node, - profile_2: Node, - load_data, - load_branch_data, - value_branch: Branch, - priority_branch: Branch, - deleted_profile_branch: Branch, - deleted_node_branch: Branch, - ): - migration = WrappedMigration040() - execution_result = await migration.execute(db=db) - assert not execution_result.errors - validation_result = await migration.validate_migration(db=db) - assert not validation_result.errors - - # validate node-level changes on main - updated_criticality_low = await NodeManager.get_one(db=db, id=criticality_low.id, include_source=True) - self.validate_node( - original_node=criticality_low, - 
updated_node=updated_criticality_low, - expected_profile_attrs=[ - AttributeProfileDetails( - attribute_name="color", - value="profile1", - is_default=False, - source_id=profile_1.id, - ), - AttributeProfileDetails( - attribute_name="is_true", - value=True, - is_default=False, - source_id=profile_1.id, - ), - ], - ) - updated_criticality_medium = await NodeManager.get_one(db=db, id=criticality_medium.id, include_source=True) - self.validate_node( - original_node=criticality_medium, - updated_node=updated_criticality_medium, - expected_profile_attrs=[ - AttributeProfileDetails(attribute_name="is_true", value=True, is_default=False, source_id=profile_1.id), - AttributeProfileDetails( - attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id - ), - ], - ) - updated_criticality_high = await NodeManager.get_one(db=db, id=criticality_high.id, include_source=True) - self.validate_node( - original_node=criticality_high, - updated_node=updated_criticality_high, - expected_profile_attrs=[ - AttributeProfileDetails( - attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id - ), - AttributeProfileDetails( - attribute_name="is_true", value=False, is_default=False, source_id=profile_2.id - ), - ], - ) - - # validate node-level changes on value branch - updated_criticality_low = await NodeManager.get_one( - db=db, branch=value_branch, id=criticality_low.id, include_source=True - ) - self.validate_node( - original_node=criticality_low, - updated_node=updated_criticality_low, - expected_profile_attrs=[ - AttributeProfileDetails( - attribute_name="description", - value="profile1_value_update", - is_default=False, - source_id=profile_1.id, - ), - AttributeProfileDetails( - attribute_name="color", - value="profile1_value_update", - is_default=False, - source_id=profile_1.id, - ), - AttributeProfileDetails( - attribute_name="is_true", - value=False, - is_default=False, - source_id=profile_1.id, - ), - ], - ) - updated_criticality_medium = await NodeManager.get_one( - db=db, branch=value_branch, id=criticality_medium.id, include_source=True - ) - self.validate_node( - original_node=criticality_medium, - updated_node=updated_criticality_medium, - expected_profile_attrs=[ - AttributeProfileDetails( - attribute_name="is_true", value=False, is_default=False, source_id=profile_1.id - ), - AttributeProfileDetails( - attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id - ), - ], - ) - updated_criticality_high = await NodeManager.get_one( - db=db, branch=value_branch, id=criticality_high.id, include_source=True - ) - self.validate_node( - original_node=criticality_high, - updated_node=updated_criticality_high, - expected_profile_attrs=[], - ) - - # validate node-level changes on priority branch - updated_criticality_low = await NodeManager.get_one( - db=db, branch=priority_branch, id=criticality_low.id, include_source=True - ) - self.validate_node( - original_node=criticality_low, - updated_node=updated_criticality_low, - expected_profile_attrs=[], - ) - updated_criticality_medium = await NodeManager.get_one( - db=db, branch=priority_branch, id=criticality_medium.id, include_source=True - ) - self.validate_node( - original_node=criticality_medium, - updated_node=updated_criticality_medium, - expected_profile_attrs=[ - AttributeProfileDetails( - attribute_name="is_true", value=False, is_default=False, source_id=profile_2.id - ), - AttributeProfileDetails( - attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id - ), - 
], - ) - updated_criticality_high = await NodeManager.get_one( - db=db, branch=priority_branch, id=criticality_high.id, include_source=True - ) - self.validate_node( - original_node=criticality_high, - updated_node=updated_criticality_high, - expected_profile_attrs=[ - AttributeProfileDetails( - attribute_name="is_true", value=False, is_default=False, source_id=profile_2.id - ), - AttributeProfileDetails( - attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id - ), - ], - ) - - # validate node-level changes on deleted profile branch - updated_criticality_low = await NodeManager.get_one( - db=db, branch=deleted_profile_branch, id=criticality_low.id, include_source=True - ) - self.validate_node( - original_node=criticality_low, - updated_node=updated_criticality_low, - # branch would need to be rebased to get profile updates applied on main - expected_profile_attrs=[], - ) - updated_criticality_medium = await NodeManager.get_one( - db=db, branch=deleted_profile_branch, id=criticality_medium.id, include_source=True - ) - self.validate_node( - original_node=criticality_medium, - updated_node=updated_criticality_medium, - expected_profile_attrs=[ - AttributeProfileDetails(attribute_name="is_true", value=True, is_default=False, source_id=profile_1.id), - ], - ) - updated_criticality_high = await NodeManager.get_one( - db=db, branch=deleted_profile_branch, id=criticality_high.id, include_source=True - ) - self.validate_node( - original_node=criticality_high, - updated_node=updated_criticality_high, - expected_profile_attrs=[], - ) - - # validate node-level changes on deleted node branch - updated_criticality_medium = await NodeManager.get_one( - db=db, branch=deleted_node_branch, id=criticality_medium.id, include_source=True - ) - self.validate_node( - original_node=criticality_medium, - updated_node=updated_criticality_medium, - # branch would need to be rebased to get profile updates applied on main - expected_profile_attrs=[], - ) - updated_criticality_high = await NodeManager.get_one( - db=db, branch=deleted_node_branch, id=criticality_high.id, include_source=True - ) - self.validate_node( - original_node=criticality_high, - updated_node=updated_criticality_high, - expected_profile_attrs=[], - ) - - # validate apply_profiles is only called on the required nodes - applier_branches = set(migration._appliers_by_branch.keys()) - assert applier_branches == { - default_branch.name, - value_branch.name, - priority_branch.name, - deleted_profile_branch.name, - deleted_node_branch.name, - } - - main_profile_applier = migration._appliers_by_branch[default_branch.name] - assert main_profile_applier.apply_profiles.call_count == 3 - refreshed_node_uuids = { - call_args[1]["node"].id for call_args in main_profile_applier.apply_profiles.call_args_list - } - assert refreshed_node_uuids == {criticality_low.id, criticality_medium.id, criticality_high.id} - - value_profile_applier = migration._appliers_by_branch[value_branch.name] - assert value_profile_applier.apply_profiles.call_count == 2 - refreshed_node_uuids = { - call_args[1]["node"].id for call_args in value_profile_applier.apply_profiles.call_args_list - } - assert refreshed_node_uuids == {criticality_low.id, criticality_medium.id} - - priority_profile_applier = migration._appliers_by_branch[priority_branch.name] - assert priority_profile_applier.apply_profiles.call_count == 2 - refreshed_node_uuids = { - call_args[1]["node"].id for call_args in priority_profile_applier.apply_profiles.call_args_list - } - assert 
refreshed_node_uuids == {criticality_medium.id, criticality_high.id} - - deleted_profile_profile_applier = migration._appliers_by_branch[deleted_profile_branch.name] - assert deleted_profile_profile_applier.apply_profiles.call_count == 2 - refreshed_node_uuids = { - call_args[1]["node"].id for call_args in deleted_profile_profile_applier.apply_profiles.call_args_list - } - assert refreshed_node_uuids == {criticality_medium.id, criticality_high.id} - - deleted_node_profile_applier = migration._appliers_by_branch[deleted_node_branch.name] - assert deleted_node_profile_applier.apply_profiles.call_count == 0 + car_person_schema: SchemaBranch, + car_accord_main: Node, + car_camry_main: Node, + ) -> None: + branch = await create_branch(db=db, branch_name="dup-attrs") + + # set the error state on main + await self._prepare_branch(db=db, branch=default_branch) + # set the error state on a branch + await self._prepare_branch(db=db, branch=branch) + + # set values on main + accord_main = await NodeManager.get_one(db=db, id=car_accord_main.id) + accord_main.smell.value = "good" + await accord_main.save(db=db) + camry_main = await NodeManager.get_one(db=db, id=car_camry_main.id) + camry_main.smell.value = "okay" + await camry_main.save(db=db) + + # set values on a branch + accord_branch = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) + accord_branch.smell.value = "bad" + await accord_branch.save(db=db) + camry_branch = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=branch) + camry_branch.smell.value = "terrible" + await camry_branch.save(db=db) + + migration = Migration040.init(db=db) + result = await migration.execute(db=db) + assert not result.errors + + # validate the result + errors = await validate_no_duplicate_attributes(db=db, branch=default_branch) + assert not errors + errors = await validate_no_duplicate_attributes(db=db, branch=branch) + assert not errors + + # validate values on main + accord_main = await NodeManager.get_one(db=db, id=car_accord_main.id) + assert accord_main.smell.value == "good" + camry_main = await NodeManager.get_one(db=db, id=car_camry_main.id) + assert camry_main.smell.value == "okay" + + # validate values on a branch + accord_branch = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) + assert accord_branch.smell.value == "bad" + camry_branch = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=branch) + assert camry_branch.smell.value == "terrible" diff --git a/backend/tests/unit/core/migrations/graph/test_041.py b/backend/tests/unit/core/migrations/graph/test_041.py new file mode 100644 index 0000000000..16058f0ec2 --- /dev/null +++ b/backend/tests/unit/core/migrations/graph/test_041.py @@ -0,0 +1,407 @@ +from dataclasses import dataclass +from typing import Any +from unittest.mock import AsyncMock + +import pytest + +from infrahub.core.branch import Branch +from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager +from infrahub.core.migrations.graph.m041_profile_attrs_in_db import Migration041 +from infrahub.core.node import Node +from infrahub.database import InfrahubDatabase +from infrahub.profiles.node_applier import NodeProfilesApplier +from tests.helpers.test_app import TestInfrahubApp + + +@dataclass +class AttributeProfileDetails: + attribute_name: str + value: Any + is_default: bool + source_id: str | None = None + + @property + def is_from_profile(self) -> bool: + return self.source_id is not None + + +class 
WrappedMigration041(Migration041): + async def _get_profile_applier(self, db: InfrahubDatabase, branch_name: str) -> NodeProfilesApplier: + profile_applier = await super()._get_profile_applier(db=db, branch_name=branch_name) + if isinstance(profile_applier, AsyncMock): + return profile_applier + wrapped_profile_applier = AsyncMock(wraps=profile_applier) + self._appliers_by_branch[branch_name] = wrapped_profile_applier + return wrapped_profile_applier + + +@pytest.mark.skip(reason="Flaky; waiting on updates to the migration") +class TestMigration041(TestInfrahubApp): + @pytest.fixture + async def profile_1(self, db: InfrahubDatabase, default_branch: Branch, criticality_schema) -> Node: + profile = await Node.init(db=db, schema="ProfileTestCriticality") + await profile.new(db=db, profile_name="profile_1", is_true=True, color="profile1", profile_priority=1001) + await profile.save(db=db) + return profile + + @pytest.fixture + async def profile_2(self, db: InfrahubDatabase, default_branch: Branch, criticality_schema) -> Node: + profile = await Node.init(db=db, schema="ProfileTestCriticality") + await profile.new( + db=db, + profile_name="profile_2", + description="profile2", + is_false=False, + is_true=False, + color="profile2", + profile_priority=1002, + ) + await profile.save(db=db) + return profile + + @pytest.fixture + async def value_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: + return await create_branch(db=db, branch_name="value_branch") + + @pytest.fixture + async def profile_1_value_update(self, db: InfrahubDatabase, value_branch: Branch, profile_1: Node) -> Node: + profile = await NodeManager.get_one(db=db, branch=value_branch, id=profile_1.id) + profile.description.value = "profile1_value_update" + profile.is_true.value = False + profile.color.value = "profile1_value_update" + await profile.save(db=db) + return profile + + @pytest.fixture + async def priority_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: + return await create_branch(db=db, branch_name="priority_branch") + + @pytest.fixture + async def profile_2_priority_update(self, db: InfrahubDatabase, priority_branch: Branch, profile_2: Node) -> Node: + profile = await NodeManager.get_one(db=db, branch=priority_branch, id=profile_2.id) + profile.profile_priority.value = 999 + await profile.save(db=db) + return profile + + @pytest.fixture + async def deleted_profile_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: + return await create_branch(db=db, branch_name="deleted_profile_branch") + + @pytest.fixture + async def profile_2_deleted(self, db: InfrahubDatabase, deleted_profile_branch: Branch, profile_2: Node): + profile = await NodeManager.get_one(db=db, branch=deleted_profile_branch, id=profile_2.id) + await profile.delete(db=db) + + @pytest.fixture + async def deleted_node_branch(self, db: InfrahubDatabase, default_branch: Branch) -> Branch: + return await create_branch(db=db, branch_name="deleted_node_branch") + + @pytest.fixture + async def criticality_low_deleted(self, db: InfrahubDatabase, deleted_node_branch: Branch, criticality_low: Node): + node = await NodeManager.get_one(db=db, branch=deleted_node_branch, id=criticality_low.id) + await node.delete(db=db) + + @pytest.fixture + async def load_data( + self, + db: InfrahubDatabase, + default_branch: Branch, + criticality_low: Node, + criticality_medium: Node, + criticality_high: Node, + profile_1: Node, + profile_2: Node, + ): + crit_low = await NodeManager.get_one(db=db, 
id=criticality_low.id) + await crit_low.profiles.update(db=db, data=[profile_1]) + await crit_low.save(db=db) + crit_medium = await NodeManager.get_one(db=db, id=criticality_medium.id) + await crit_medium.profiles.update(db=db, data=[profile_1, profile_2]) + await crit_medium.save(db=db) + crit_high = await NodeManager.get_one(db=db, id=criticality_high.id) + await crit_high.profiles.update(db=db, data=[profile_2]) + await crit_high.save(db=db) + + @pytest.fixture + async def load_branch_data( + self, + value_branch: Branch, + profile_1_value_update: Node, + priority_branch: Branch, + profile_2_priority_update: Node, + deleted_profile_branch: Branch, + profile_2_deleted: Node, + deleted_node_branch: Branch, + criticality_low_deleted: Node, + ): + pass + + def validate_node( + self, + original_node: Node, + updated_node: Node, + expected_profile_attrs: list[AttributeProfileDetails], + ): + expected_profile_attrs_by_name = {attr.attribute_name: attr for attr in expected_profile_attrs} + for attribute_name in updated_node._attributes: + current_attribute = getattr(updated_node, attribute_name) + if expected_profile_attr := expected_profile_attrs_by_name.get(attribute_name): + assert current_attribute.value == expected_profile_attr.value + assert current_attribute.is_default == expected_profile_attr.is_default + assert current_attribute.is_from_profile == expected_profile_attr.is_from_profile + assert current_attribute.source_id == expected_profile_attr.source_id + continue + original_attribute = getattr(original_node, attribute_name) + assert current_attribute.value == original_attribute.value + assert current_attribute.is_default == original_attribute.is_default + assert current_attribute.is_from_profile == original_attribute.is_from_profile + assert current_attribute.source_id == original_attribute.source_id + + async def test_migration_041( + self, + db: InfrahubDatabase, + default_branch: Branch, + criticality_low: Node, + criticality_medium: Node, + criticality_high: Node, + profile_1: Node, + profile_2: Node, + load_data, + load_branch_data, + value_branch: Branch, + priority_branch: Branch, + deleted_profile_branch: Branch, + deleted_node_branch: Branch, + ): + migration = WrappedMigration041() + execution_result = await migration.execute(db=db) + assert not execution_result.errors + validation_result = await migration.validate_migration(db=db) + assert not validation_result.errors + + # validate node-level changes on main + updated_criticality_low = await NodeManager.get_one(db=db, id=criticality_low.id, include_source=True) + self.validate_node( + original_node=criticality_low, + updated_node=updated_criticality_low, + expected_profile_attrs=[ + AttributeProfileDetails( + attribute_name="color", + value="profile1", + is_default=False, + source_id=profile_1.id, + ), + AttributeProfileDetails( + attribute_name="is_true", + value=True, + is_default=False, + source_id=profile_1.id, + ), + ], + ) + updated_criticality_medium = await NodeManager.get_one(db=db, id=criticality_medium.id, include_source=True) + self.validate_node( + original_node=criticality_medium, + updated_node=updated_criticality_medium, + expected_profile_attrs=[ + AttributeProfileDetails(attribute_name="is_true", value=True, is_default=False, source_id=profile_1.id), + AttributeProfileDetails( + attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id + ), + ], + ) + updated_criticality_high = await NodeManager.get_one(db=db, id=criticality_high.id, include_source=True) + self.validate_node( + 
original_node=criticality_high, + updated_node=updated_criticality_high, + expected_profile_attrs=[ + AttributeProfileDetails( + attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id + ), + AttributeProfileDetails( + attribute_name="is_true", value=False, is_default=False, source_id=profile_2.id + ), + ], + ) + + # validate node-level changes on value branch + updated_criticality_low = await NodeManager.get_one( + db=db, branch=value_branch, id=criticality_low.id, include_source=True + ) + self.validate_node( + original_node=criticality_low, + updated_node=updated_criticality_low, + expected_profile_attrs=[ + AttributeProfileDetails( + attribute_name="description", + value="profile1_value_update", + is_default=False, + source_id=profile_1.id, + ), + AttributeProfileDetails( + attribute_name="color", + value="profile1_value_update", + is_default=False, + source_id=profile_1.id, + ), + AttributeProfileDetails( + attribute_name="is_true", + value=False, + is_default=False, + source_id=profile_1.id, + ), + ], + ) + updated_criticality_medium = await NodeManager.get_one( + db=db, branch=value_branch, id=criticality_medium.id, include_source=True + ) + self.validate_node( + original_node=criticality_medium, + updated_node=updated_criticality_medium, + expected_profile_attrs=[ + AttributeProfileDetails( + attribute_name="is_true", value=False, is_default=False, source_id=profile_1.id + ), + AttributeProfileDetails( + attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id + ), + ], + ) + updated_criticality_high = await NodeManager.get_one( + db=db, branch=value_branch, id=criticality_high.id, include_source=True + ) + self.validate_node( + original_node=criticality_high, + updated_node=updated_criticality_high, + expected_profile_attrs=[], + ) + + # validate node-level changes on priority branch + updated_criticality_low = await NodeManager.get_one( + db=db, branch=priority_branch, id=criticality_low.id, include_source=True + ) + self.validate_node( + original_node=criticality_low, + updated_node=updated_criticality_low, + expected_profile_attrs=[], + ) + updated_criticality_medium = await NodeManager.get_one( + db=db, branch=priority_branch, id=criticality_medium.id, include_source=True + ) + self.validate_node( + original_node=criticality_medium, + updated_node=updated_criticality_medium, + expected_profile_attrs=[ + AttributeProfileDetails( + attribute_name="is_true", value=False, is_default=False, source_id=profile_2.id + ), + AttributeProfileDetails( + attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id + ), + ], + ) + updated_criticality_high = await NodeManager.get_one( + db=db, branch=priority_branch, id=criticality_high.id, include_source=True + ) + self.validate_node( + original_node=criticality_high, + updated_node=updated_criticality_high, + expected_profile_attrs=[ + AttributeProfileDetails( + attribute_name="is_true", value=False, is_default=False, source_id=profile_2.id + ), + AttributeProfileDetails( + attribute_name="is_false", value=False, is_default=False, source_id=profile_2.id + ), + ], + ) + + # validate node-level changes on deleted profile branch + updated_criticality_low = await NodeManager.get_one( + db=db, branch=deleted_profile_branch, id=criticality_low.id, include_source=True + ) + self.validate_node( + original_node=criticality_low, + updated_node=updated_criticality_low, + # branch would need to be rebased to get profile updates applied on main + expected_profile_attrs=[], + ) + 
updated_criticality_medium = await NodeManager.get_one( + db=db, branch=deleted_profile_branch, id=criticality_medium.id, include_source=True + ) + self.validate_node( + original_node=criticality_medium, + updated_node=updated_criticality_medium, + expected_profile_attrs=[ + AttributeProfileDetails(attribute_name="is_true", value=True, is_default=False, source_id=profile_1.id), + ], + ) + updated_criticality_high = await NodeManager.get_one( + db=db, branch=deleted_profile_branch, id=criticality_high.id, include_source=True + ) + self.validate_node( + original_node=criticality_high, + updated_node=updated_criticality_high, + expected_profile_attrs=[], + ) + + # validate node-level changes on deleted node branch + updated_criticality_medium = await NodeManager.get_one( + db=db, branch=deleted_node_branch, id=criticality_medium.id, include_source=True + ) + self.validate_node( + original_node=criticality_medium, + updated_node=updated_criticality_medium, + # branch would need to be rebased to get profile updates applied on main + expected_profile_attrs=[], + ) + updated_criticality_high = await NodeManager.get_one( + db=db, branch=deleted_node_branch, id=criticality_high.id, include_source=True + ) + self.validate_node( + original_node=criticality_high, + updated_node=updated_criticality_high, + expected_profile_attrs=[], + ) + + # validate apply_profiles is only called on the required nodes + applier_branches = set(migration._appliers_by_branch.keys()) + assert applier_branches == { + default_branch.name, + value_branch.name, + priority_branch.name, + deleted_profile_branch.name, + deleted_node_branch.name, + } + + main_profile_applier = migration._appliers_by_branch[default_branch.name] + assert main_profile_applier.apply_profiles.call_count == 3 + refreshed_node_uuids = { + call_args[1]["node"].id for call_args in main_profile_applier.apply_profiles.call_args_list + } + assert refreshed_node_uuids == {criticality_low.id, criticality_medium.id, criticality_high.id} + + value_profile_applier = migration._appliers_by_branch[value_branch.name] + assert value_profile_applier.apply_profiles.call_count == 2 + refreshed_node_uuids = { + call_args[1]["node"].id for call_args in value_profile_applier.apply_profiles.call_args_list + } + assert refreshed_node_uuids == {criticality_low.id, criticality_medium.id} + + priority_profile_applier = migration._appliers_by_branch[priority_branch.name] + assert priority_profile_applier.apply_profiles.call_count == 2 + refreshed_node_uuids = { + call_args[1]["node"].id for call_args in priority_profile_applier.apply_profiles.call_args_list + } + assert refreshed_node_uuids == {criticality_medium.id, criticality_high.id} + + deleted_profile_profile_applier = migration._appliers_by_branch[deleted_profile_branch.name] + assert deleted_profile_profile_applier.apply_profiles.call_count == 2 + refreshed_node_uuids = { + call_args[1]["node"].id for call_args in deleted_profile_profile_applier.apply_profiles.call_args_list + } + assert refreshed_node_uuids == {criticality_medium.id, criticality_high.id} + + deleted_node_profile_applier = migration._appliers_by_branch[deleted_node_branch.name] + assert deleted_node_profile_applier.apply_profiles.call_count == 0 diff --git a/backend/tests/unit/core/migrations/graph/test_041_042.py b/backend/tests/unit/core/migrations/graph/test_042_043.py similarity index 88% rename from backend/tests/unit/core/migrations/graph/test_041_042.py rename to backend/tests/unit/core/migrations/graph/test_042_043.py index c98b5941e8..b9f6864444 
100644 --- a/backend/tests/unit/core/migrations/graph/test_041_042.py +++ b/backend/tests/unit/core/migrations/graph/test_042_043.py @@ -5,8 +5,8 @@ from infrahub.core import registry from infrahub.core.branch.models import Branch from infrahub.core.manager import NodeManager -from infrahub.core.migrations.graph.m041_create_hfid_display_label_in_db import Migration041 -from infrahub.core.migrations.graph.m042_backfill_hfid_display_label_in_db import Migration042 +from infrahub.core.migrations.graph.m042_create_hfid_display_label_in_db import Migration042 +from infrahub.core.migrations.graph.m043_backfill_hfid_display_label_in_db import Migration043 from infrahub.core.node import Node from infrahub.core.schema import SchemaRoot from infrahub.core.schema.schema_branch import SchemaBranch @@ -27,7 +27,7 @@ """ -class TestMigration041(TestInfrahubApp): +class TestMigration042(TestInfrahubApp): @pytest.fixture async def initial_dataset( self, db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: SchemaBranch @@ -56,7 +56,7 @@ async def initial_dataset( return nodes - async def test_migration_041_042(self, db: InfrahubDatabase, initial_dataset: dict[str, Node]) -> None: + async def test_migration_042_043(self, db: InfrahubDatabase, initial_dataset: dict[str, Node]) -> None: results = await db.execute_query(query=QUERY_HFID) assert not results @@ -69,7 +69,7 @@ async def test_migration_041_042(self, db: InfrahubDatabase, initial_dataset: di assert not results async with db.start_session() as dbs: - migration = Migration041(migrations=[]) + migration = Migration042(migrations=[]) execution_result = await migration.execute(db=dbs) assert not execution_result.errors @@ -77,7 +77,7 @@ async def test_migration_041_042(self, db: InfrahubDatabase, initial_dataset: di assert not validation_result.errors async with db.start_session() as dbs: - migration = Migration042() + migration = Migration043() execution_result = await migration.execute(db=dbs) assert not execution_result.errors diff --git a/backend/tests/unit/graphql/diff/test_diff_tree_query.py b/backend/tests/unit/graphql/diff/test_diff_tree_query.py index 31767b0b52..cde769f803 100644 --- a/backend/tests/unit/graphql/diff/test_diff_tree_query.py +++ b/backend/tests/unit/graphql/diff/test_diff_tree_query.py @@ -220,9 +220,8 @@ async def test_diff_tree_no_changes( from_time = Timestamp(diff_branch.branched_from) to_time = enriched_diff_metadata.to_time - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY, @@ -232,6 +231,7 @@ async def test_diff_tree_no_changes( ) assert result.errors is None + assert result.data assert result.data["DiffTree"] == { "base_branch": default_branch.name, "diff_branch": diff_branch.name, @@ -250,9 +250,8 @@ async def test_diff_tree_no_changes( async def test_diff_tree_no_diffs( db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema, diff_branch: Branch ): - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY, @@ -262,13 +261,13 @@ async def test_diff_tree_no_diffs( ) assert 
result.errors is None + assert result.data assert result.data["DiffTree"] is None async def test_diff_tree_no_branch(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY, @@ -319,9 +318,8 @@ async def test_diff_tree_one_attr_change( await main_crit.save(db=db) await branch_crit.save(db=db) - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY, @@ -334,6 +332,7 @@ async def test_diff_tree_one_attr_change( assert result.errors is None + assert result.data assert result.data["DiffTree"] assert result.data["DiffTree"]["nodes"] node_diff = result.data["DiffTree"]["nodes"][0] @@ -436,9 +435,8 @@ async def test_diff_tree_one_relationship_change( enriched_diff_metadata = await diff_coordinator.update_branch_diff( base_branch=default_branch, diff_branch=diff_branch ) - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY, @@ -451,6 +449,7 @@ async def test_diff_tree_one_relationship_change( assert result.errors is None + assert result.data assert result.data["DiffTree"] diff_tree_response = result.data["DiffTree"].copy() nodes_response = diff_tree_response.pop("nodes") @@ -693,9 +692,8 @@ async def test_diff_tree_hierarchy_change( await rack2_branch.save(db=db) await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=diff_branch) - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY, @@ -705,6 +703,7 @@ async def test_diff_tree_hierarchy_change( ) assert result.errors is None + assert result.data assert len(result.data["DiffTree"]["nodes"]) == 4 nodes_parent = {node["label"]: node["parent"] for node in result.data["DiffTree"]["nodes"]} @@ -720,9 +719,8 @@ async def test_diff_tree_hierarchy_change( async def test_diff_tree_summary_no_diffs( db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema, diff_branch: Branch ): - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY_SUMMARY, @@ -732,6 +730,7 @@ async def test_diff_tree_summary_no_diffs( ) assert result.errors is None + assert result.data assert result.data["DiffTreeSummary"] is None @@ -748,9 +747,8 @@ async def test_diff_tree_summary_no_changes( from_time = Timestamp(diff_branch.branched_from) to_time = enriched_diff_metadata.to_time - params = await prepare_graphql_params( - db=db, 
include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=DIFF_TREE_QUERY_SUMMARY, @@ -760,6 +758,7 @@ async def test_diff_tree_summary_no_changes( ) assert result.errors is None + assert result.data assert result.data["DiffTreeSummary"] == { "base_branch": default_branch.name, "diff_branch": diff_branch.name, @@ -845,9 +844,8 @@ async def test_diff_summary_filters( enriched_diff_metadata = await diff_coordinator.update_branch_diff( base_branch=default_branch, diff_branch=diff_branch ) - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, @@ -945,9 +943,8 @@ async def test_diff_get_filters( component_registry = get_component_registry() diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=diff_branch) await diff_coordinator.update_branch_diff(base_branch=default_branch, diff_branch=diff_branch) - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, diff --git a/backend/tests/unit/graphql/diff/test_diff_update_mutation.py b/backend/tests/unit/graphql/diff/test_diff_update_mutation.py index 63b8a356b9..08f58e4573 100644 --- a/backend/tests/unit/graphql/diff/test_diff_update_mutation.py +++ b/backend/tests/unit/graphql/diff/test_diff_update_mutation.py @@ -58,9 +58,8 @@ async def named_diff( criticality_schema, diff_branch: Branch, ) -> EnrichedDiffRootMetadata: - params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch, service=service_testing - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch, service=service_testing) result = await graphql( schema=params.schema, source=DIFF_UPDATE_MUTATION, @@ -69,6 +68,7 @@ async def named_diff( variable_values={"branch": diff_branch.name, "name": self.diff_name}, ) assert result.errors is None + assert result.data assert result.data["DiffUpdate"]["ok"] is True diff_repo = DiffRepository(db=db, deserializer=EnrichedDiffDeserializer(DiffParentNodeAdder())) @@ -90,9 +90,8 @@ async def test_create_diff_before_branched_from_fails( diff_branch: Branch, ): branched_from_timestamp = Timestamp(diff_branch.get_branched_from()) - params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch, service=service_testing - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch, service=service_testing) result = await graphql( schema=params.schema, source=DIFF_UPDATE_MUTATION, @@ -105,6 +104,7 @@ async def test_create_diff_before_branched_from_fails( }, ) assert result.errors is None + assert result.data assert result.data["DiffUpdate"]["ok"] is True async def test_create_time_range_diff_without_name_fails( @@ -117,9 +117,8 @@ async def test_create_time_range_diff_without_name_fails( diff_branch: Branch, ): branched_from_timestamp = Timestamp(diff_branch.get_branched_from()) - 
params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch, service=service_testing - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch, service=service_testing) result = await graphql( schema=params.schema, source=DIFF_UPDATE_MUTATION, @@ -145,9 +144,8 @@ async def test_create_diff_with_illegal_times_fails( diff_branch: Branch, named_diff: EnrichedDiffRootMetadata, ): - params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch, service=service_testing - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch, service=service_testing) result = await graphql( schema=params.schema, source=DIFF_UPDATE_MUTATION, @@ -189,9 +187,8 @@ async def test_create_named_diff_with_legal_times_succeeds( named_diff: EnrichedDiffRootMetadata, ): branched_from_timestamp = Timestamp(diff_branch.get_branched_from()) - params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch, service=service_testing - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch, service=service_testing) result = await graphql( schema=params.schema, source=DIFF_UPDATE_MUTATION, @@ -218,9 +215,8 @@ async def test_retrieve_task_id( named_diff: EnrichedDiffRootMetadata, ): branched_from_timestamp = Timestamp(diff_branch.get_branched_from()) - params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch, service=service_testing - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch, service=service_testing) result = await graphql( schema=params.schema, source=DIFF_UPDATE_MUTATION, diff --git a/backend/tests/unit/graphql/mutations/test_action.py b/backend/tests/unit/graphql/mutations/test_action.py index fbd6570fc1..c5ecba4ee1 100644 --- a/backend/tests/unit/graphql/mutations/test_action.py +++ b/backend/tests/unit/graphql/mutations/test_action.py @@ -10,7 +10,8 @@ async def _prepare_group_action( db: InfrahubDatabase, default_branch: Branch, ) -> str: - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) query = """ mutation { CoreStandardGroupCreate(data: { @@ -64,7 +65,8 @@ async def test_create_node_trigger_failure_states( db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None, car_person_schema: None ) -> None: group_id = await _prepare_group_action(db=db, default_branch=default_branch) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=CREATE_NODE_TRIGGER, @@ -92,7 +94,8 @@ async def test_modify_action_node_attribute_matches( db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None, car_person_schema: None ) -> None: group_id = await _prepare_group_action(db=db, default_branch=default_branch) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await 
prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, @@ -153,7 +156,8 @@ async def test_modify_action_node_relationship_matches( db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None, car_person_schema: None ) -> None: group_id = await _prepare_group_action(db=db, default_branch=default_branch) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, diff --git a/backend/tests/unit/graphql/mutations/test_branch.py b/backend/tests/unit/graphql/mutations/test_branch.py index d43bd010a9..e986737632 100644 --- a/backend/tests/unit/graphql/mutations/test_branch.py +++ b/backend/tests/unit/graphql/mutations/test_branch.py @@ -65,9 +65,9 @@ async def test_branch_create( assert branch2.schema_hash == branch2_schema.get_hash_full() # Validate that we can't create a branch with a name that already exist + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=session_admin, service=service, @@ -97,9 +97,9 @@ async def test_branch_create( } } """ + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=session_admin, service=service, @@ -142,9 +142,9 @@ async def test_branch_create_invalid_names( } """ + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=session_admin, service=service, @@ -218,9 +218,9 @@ async def test_branch_create_registry( } """ + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=session_admin, service=service, @@ -321,9 +321,9 @@ async def test_branch_rebase_wrong_branch( } """ + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, service=local_services, branch=default_branch, account_session=session_admin, @@ -357,7 +357,8 @@ async def test_branch_update_description(db: InfrahubDatabase, base_dataset_02, } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch4, service=local_services) + branch4.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch4, service=local_services) result = await graphql( schema=gql_params.schema, source=query, @@ -391,8 +392,9 @@ async def test_branch_merge_wrong_branch( } """ + branch1.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch1, account_session=session_admin, service=local_services + db=db, branch=branch1, account_session=session_admin, service=local_services ) result = await graphql( schema=gql_params.schema, @@ -429,8 +431,9 @@ async def test_branch_merge_with_conflict_fails( car_branch.name.value += "-branch" await car_branch.save(db=db) + branch2.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch2, account_session=session_admin, service=local_services + db=db, branch=branch2, account_session=session_admin, service=local_services ) result = await graphql( schema=gql_params.schema, diff --git 
a/backend/tests/unit/graphql/mutations/test_group_event_collection.py b/backend/tests/unit/graphql/mutations/test_group_event_collection.py index ac46deea02..c175000d34 100644 --- a/backend/tests/unit/graphql/mutations/test_group_event_collection.py +++ b/backend/tests/unit/graphql/mutations/test_group_event_collection.py @@ -35,8 +35,9 @@ async def test_node_mutation_to_group_event( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) create_query = """ @@ -102,8 +103,9 @@ async def test_node_mutation_to_group_event( """ memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -145,8 +147,9 @@ async def test_node_mutation_to_group_event( """ memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, diff --git a/backend/tests/unit/graphql/mutations/test_ipam.py b/backend/tests/unit/graphql/mutations/test_ipam.py index d72acc2991..f93050f4f0 100644 --- a/backend/tests/unit/graphql/mutations/test_ipam.py +++ b/backend/tests/unit/graphql/mutations/test_ipam.py @@ -272,7 +272,8 @@ async def test_protected_default_ipnamespace(db: InfrahubDatabase, default_branch: Branch, default_ipnamespace: Node): - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=DELETE_IPNAMESPACE, @@ -289,7 +290,8 @@ async def test_delete_regular_ipnamespace(db: InfrahubDatabase, default_branch: await ns1.new(db=db, name="ns1") await ns1.save(db=db) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=DELETE_IPNAMESPACE, @@ -298,6 +300,7 @@ async def test_delete_regular_ipnamespace(db: InfrahubDatabase, default_branch: ) assert not result.errors + assert result.data assert result.data["IpamNamespaceDelete"]["ok"] @@ -309,7 +312,8 @@ async def test_ipprefix_create( register_ipam_schema: SchemaBranch, ): """Make sure a prefix can be created and parent/children relationships are set.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) supernet = ipaddress.ip_network("2001:db8::/32") 
result = await graphql( @@ -320,6 +324,7 @@ async def test_ipprefix_create( ) assert not result.errors + assert result.data assert result.data["IpamIPPrefixCreate"]["ok"] assert result.data["IpamIPPrefixCreate"]["object"]["id"] @@ -398,7 +403,8 @@ async def test_ipprefix_create_with_ipnamespace( } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) supernet = ipaddress.ip_network("2001:db8::/32") result = await graphql( @@ -409,6 +415,7 @@ async def test_ipprefix_create_with_ipnamespace( ) assert not result.errors + assert result.data assert result.data["IpamIPPrefixCreate"]["ok"] assert result.data["IpamIPPrefixCreate"]["object"]["id"] @@ -465,7 +472,8 @@ async def test_ipprefix_create_reverse( register_ipam_schema: SchemaBranch, ): """Make sure parent/children relationships are set when creating a parent after a child.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) subnet = ipaddress.ip_network("2001:db8::/48") result = await graphql( @@ -476,6 +484,7 @@ async def test_ipprefix_create_reverse( ) assert not result.errors + assert result.data assert result.data["IpamIPPrefixCreate"]["ok"] supernet = ipaddress.ip_network("2001:db8::/32") @@ -510,7 +519,8 @@ async def test_ipprefix_update( register_ipam_schema: SchemaBranch, ): """Make sure a prefix can be updated.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) subnet = ipaddress.ip_network("2001:db8::/48") result = await graphql( @@ -521,6 +531,7 @@ async def test_ipprefix_update( ) assert not result.errors + assert result.data assert result.data["IpamIPPrefixCreate"]["ok"] subnet_id = result.data["IpamIPPrefixCreate"]["object"]["id"] @@ -548,7 +559,8 @@ async def test_ipprefix_update_within_namespace( await test_ns.save(db=db) ns_hfid = await test_ns.get_hfid_as_string(db=db) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) subnet = ipaddress.ip_network("2001:db8::/48") # Test with namespace by ID @@ -707,7 +719,8 @@ async def test_ipprefix_upsert( register_ipam_schema: SchemaBranch, ): """Make sure a prefix can be upserted.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) subnet = ipaddress.ip_network("2001:db8::/48") result = await graphql( @@ -718,6 +731,7 @@ async def test_ipprefix_upsert( ) assert not result.errors + assert result.data assert result.data["IpamIPPrefixUpsert"]["ok"] assert not result.data["IpamIPPrefixUpsert"]["object"]["description"]["value"] @@ -742,7 +756,8 @@ async def test_ipprefix_delete( register_ipam_schema: SchemaBranch, ): """Make sure deleting a prefix relocates its children.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) networks = [ 
ipaddress.ip_network("2001:db8::/32"), @@ -817,7 +832,8 @@ async def test_ipaddress_create( register_ipam_schema: SchemaBranch, ): """Make sure an IP address is properly created and nested under a subnet.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) # Single IP address, no IP prefix address = ipaddress.ip_interface("192.0.2.1/24") @@ -829,6 +845,7 @@ async def test_ipaddress_create( ) assert not result.errors + assert result.data assert result.data["IpamIPAddressCreate"]["ok"] assert result.data["IpamIPAddressCreate"]["object"]["id"] @@ -892,7 +909,8 @@ async def test_ipaddress_update( register_ipam_schema: SchemaBranch, ): """Make sure an IP address can be updated.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) address = ipaddress.ip_interface("192.0.2.1/24") result = await graphql( @@ -903,6 +921,7 @@ async def test_ipaddress_update( ) assert not result.errors + assert result.data assert result.data["IpamIPAddressCreate"]["ok"] address_id = result.data["IpamIPAddressCreate"]["object"]["id"] @@ -929,7 +948,8 @@ async def test_ipaddress_update_within_namespace( await test_ns.new(db=db, name="test") await test_ns.save(db=db) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) address = ipaddress.ip_interface("192.0.2.1/24") result = await graphql( @@ -965,6 +985,7 @@ async def test_ipaddress_update_within_namespace( ) assert not result.errors + assert result.data assert result.data["IpamIPAddressCreate"]["ok"] assert result.data["IpamIPAddressCreate"]["object"]["ip_namespace"]["node"]["name"]["value"] == test_ns.name.value @@ -1012,7 +1033,8 @@ async def test_ipaddress_upsert( register_ipam_schema: SchemaBranch, ): """Make sure an IP address can be upserted.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) address = ipaddress.ip_interface("192.0.2.1/24") result = await graphql( @@ -1023,6 +1045,7 @@ async def test_ipaddress_upsert( ) assert not result.errors + assert result.data assert result.data["IpamIPAddressUpsert"]["ok"] assert not result.data["IpamIPAddressUpsert"]["object"]["description"]["value"] @@ -1035,6 +1058,7 @@ async def test_ipaddress_upsert( ) assert not result.errors + assert result.data assert result.data["IpamIPAddressUpsert"]["ok"] assert result.data["IpamIPAddressUpsert"]["object"]["description"]["value"] == "RFC 5735" @@ -1047,7 +1071,8 @@ async def test_ipaddress_change_ipprefix( register_ipam_schema: SchemaBranch, ): """Make sure the relationship between an address and its prefix is properly managed.""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) address = ipaddress.ip_interface("2001:db8::1/64") result = await graphql( @@ -1058,6 +1083,7 @@ async def test_ipaddress_change_ipprefix( ) assert not result.errors + assert result.data assert 
result.data["IpamIPAddressCreate"]["ok"] # Create subnet which contains the previously created IP should set relationships @@ -1070,6 +1096,7 @@ async def test_ipaddress_change_ipprefix( ) assert not result.errors + assert result.data assert result.data["IpamIPPrefixCreate"]["ok"] result = await graphql( @@ -1211,7 +1238,8 @@ async def test_prefix_ancestors_descendants( await net16.new(db=db, prefix="10.0.0.0/16", parent=net14, ip_namespace=ns1) await net16.save(db=db) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) check_before = await graphql( schema=gql_params.schema, source=GET_PREFIX_HIERARCHY, @@ -1219,6 +1247,7 @@ async def test_prefix_ancestors_descendants( variable_values={"prefix": str(net12.prefix.value)}, ) assert not check_before.errors + assert check_before.data assert len(check_before.data["IpamIPPrefix"]["edges"]) == 1 prefix_details = check_before.data["IpamIPPrefix"]["edges"][0]["node"] assert prefix_details["id"] == net12.id @@ -1312,7 +1341,8 @@ async def test_delete_top_level_prefix( await net10.new(db=db, prefix="10.0.0.0/10", parent=net8, ip_namespace=ns1) await net10.save(db=db) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) delete_top = await graphql( schema=gql_params.schema, source=DELETE_IPPREFIX, @@ -1320,9 +1350,11 @@ async def test_delete_top_level_prefix( variable_values={"id": str(net10.id)}, ) assert not delete_top.errors + assert delete_top.data assert delete_top.data["IpamIPPrefixDelete"]["ok"] is True - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) delete_last_prefix = await graphql( schema=gql_params.schema, source=DELETE_IPPREFIX, @@ -1330,6 +1362,7 @@ async def test_delete_top_level_prefix( variable_values={"id": str(net8.id)}, ) assert not delete_last_prefix.errors + assert delete_last_prefix.data assert delete_last_prefix.data["IpamIPPrefixDelete"]["ok"] is True ip_prefixes = await NodeManager.query(db=db, branch=default_branch, schema="IpamIPPrefix") diff --git a/backend/tests/unit/graphql/mutations/test_mutation_context.py b/backend/tests/unit/graphql/mutations/test_mutation_context.py index 98801e74a3..33e814424a 100644 --- a/backend/tests/unit/graphql/mutations/test_mutation_context.py +++ b/backend/tests/unit/graphql/mutations/test_mutation_context.py @@ -50,9 +50,8 @@ async def test_add_context_invalid_account( } } """ - gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, account_session=session_first_account - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_first_account) result = await graphql( schema=gql_params.schema, source=query, @@ -98,8 +97,9 @@ async def test_add_context_valid_account( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, 
service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -139,9 +139,9 @@ async def test_add_context_missing_permissions( } """ % (first_account.id) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=session_second_account, ) diff --git a/backend/tests/unit/graphql/mutations/test_proposed_change.py b/backend/tests/unit/graphql/mutations/test_proposed_change.py index 594b0b2f7d..6fce8f1ed5 100644 --- a/backend/tests/unit/graphql/mutations/test_proposed_change.py +++ b/backend/tests/unit/graphql/mutations/test_proposed_change.py @@ -110,9 +110,9 @@ async def test_create_invalid_branch_combinations(db: InfrahubDatabase, default_ await account.new(db=db, name="user", password="password") await account.save(db=db) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=AccountSession(authenticated=False, account_id=account.get_id(), auth_type=AuthType.NONE), ) @@ -175,10 +175,9 @@ async def test_create_invalid_state_combinations( account = await Node.init(db=db, schema=InfrahubKind.ACCOUNT) await account.new(db=db, name="user", password="password") await account.save(db=db) - + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=AccountSession(authenticated=False, account_id=account.get_id(), auth_type=AuthType.NONE), ) diff --git a/backend/tests/unit/graphql/mutations/test_resource_manager.py b/backend/tests/unit/graphql/mutations/test_resource_manager.py index ffb1f10bb5..bb60a78825 100644 --- a/backend/tests/unit/graphql/mutations/test_resource_manager.py +++ b/backend/tests/unit/graphql/mutations/test_resource_manager.py @@ -84,7 +84,8 @@ async def test_create_object_and_assign_prefix_from_pool(db: InfrahubDatabase, d % pool.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -149,7 +150,8 @@ async def test_update_object_and_assign_prefix_from_pool(db: InfrahubDatabase, d } """ % (obj.id, pool.id) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -230,7 +232,8 @@ async def test_create_object_and_assign_address_from_pool( % pool.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -294,7 +297,8 @@ async def test_prefix_pool_get_resource( % pool.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -358,7 +362,8 @@ async def test_prefix_pool_get_resource_with_identifier( % pool.id ) - gql_params = await 
diff --git a/backend/tests/unit/graphql/mutations/test_resource_manager.py b/backend/tests/unit/graphql/mutations/test_resource_manager.py
index ffb1f10bb5..bb60a78825 100644
--- a/backend/tests/unit/graphql/mutations/test_resource_manager.py
+++ b/backend/tests/unit/graphql/mutations/test_resource_manager.py
@@ -84,7 +84,8 @@ async def test_create_object_and_assign_prefix_from_pool(db: InfrahubDatabase, d
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -149,7 +150,8 @@ async def test_update_object_and_assign_prefix_from_pool(db: InfrahubDatabase, d
     }
     """ % (obj.id, pool.id)
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -230,7 +232,8 @@ async def test_create_object_and_assign_address_from_pool(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -294,7 +297,8 @@ async def test_prefix_pool_get_resource(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -358,7 +362,8 @@ async def test_prefix_pool_get_resource_with_identifier(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -420,7 +425,8 @@ async def test_prefix_pool_get_resource_with_prefix_length(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -478,7 +484,8 @@ async def test_address_pool_get_resource(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -541,7 +548,8 @@ async def test_address_pool_get_resource_with_identifier(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -602,7 +610,8 @@ async def test_address_pool_get_resource_with_prefix_length(
         % pool.id
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -705,7 +714,8 @@ async def test_test_number_pool_creation_errors(
     db: InfrahubDatabase, default_branch: Branch, register_core_models_schema
 ):
     await load_schema(db=db, schema=SchemaRoot(nodes=[TICKET]))
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     no_model = await graphql(
         schema=gql_params.schema,
@@ -789,7 +799,8 @@
 
 async def test_test_number_pool_update(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema):
     await load_schema(db=db, schema=SchemaRoot(nodes=[TICKET]))
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     create_ok = await graphql(
         schema=gql_params.schema,
@@ -884,7 +895,8 @@ async def test_delete_number_pool_in_use_by_numberpool_attribute(
     db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None
 ) -> None:
     await load_schema(db=db, schema=SNOW_TICKET_SCHEMA)
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     node_schema = registry.schema.get(name="SnowTask", branch=default_branch)
     number_pool_attribute = node_schema.get_attribute(name="number")
     assert isinstance(number_pool_attribute.parameters, NumberPoolParameters)
@@ -971,7 +983,8 @@ async def test_update_schema_number_pool_range(
     db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None
 ) -> None:
     await load_schema(db=db, schema=SNOW_TICKET_SCHEMA)
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     node_schema = registry.schema.get(name="SnowTask", branch=default_branch)
     number_pool_attribute = node_schema.get_attribute(name="number")
     assert isinstance(number_pool_attribute.parameters, NumberPoolParameters)
diff --git a/backend/tests/unit/graphql/mutations/test_schema.py b/backend/tests/unit/graphql/mutations/test_schema.py
index 0d4fd7966b..58d7bbccc2 100644
--- a/backend/tests/unit/graphql/mutations/test_schema.py
+++ b/backend/tests/unit/graphql/mutations/test_schema.py
@@ -1,5 +1,6 @@
 import pytest
 
+from infrahub.core.branch import Branch
 from infrahub.core.node import Node
 from infrahub.database import InfrahubDatabase
 from infrahub.exceptions import ValidationError
@@ -9,8 +10,8 @@ async def test_delete_last_dropdown_option(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
-):
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
+) -> None:
     query = """
     mutation {
         SchemaDropdownRemove(data: {kind: "TestChoice", attribute: "temperature_scale", dropdown: "celsius"}) {
@@ -18,9 +19,8 @@ async def test_delete_last_dropdown_option(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -34,7 +34,7 @@ async def test_delete_last_enum_option(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
 ):
     query = """
     mutation {
@@ -43,9 +43,8 @@ async def test_delete_last_enum_option(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -59,8 +58,8 @@ async def test_delete_enum_option_that_does_not_exist(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
-):
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
+) -> None:
     query = """
     mutation {
         SchemaEnumRemove(data: {kind: "BaseChoice", attribute: "color", enum: "yellow"}) {
@@ -68,9 +67,8 @@ async def test_delete_enum_option_that_does_not_exist(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -84,7 +82,7 @@ async def test_delete_drop_option_that_does_not_exist(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
 ):
     query = """
     mutation {
@@ -93,9 +91,8 @@ async def test_delete_drop_option_that_does_not_exist(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -109,7 +106,7 @@ async def test_add_enum_option_that_exist(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
 ):
     query = """
     mutation {
@@ -118,9 +115,8 @@ async def test_add_enum_option_that_exist(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -134,7 +130,7 @@ async def test_delete_dropdown_option_in_use(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
 ):
     obj1 = await Node.init(db=db, schema="TestChoice")
     await obj1.new(db=db, name="test-passive-01", status="passive", temperature_scale="celsius")
@@ -147,9 +143,8 @@ async def test_delete_dropdown_option_in_use(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -163,7 +158,7 @@ async def test_delete_enum_option_in_use(
-    db: InfrahubDatabase, default_permission_backend, default_branch, choices_schema, session_admin
+    db: InfrahubDatabase, default_permission_backend, default_branch: Branch, choices_schema, session_admin
 ):
     obj1 = await Node.init(db=db, schema="TestChoice")
     await obj1.new(db=db, name="test-passive-01", status="passive")
@@ -176,9 +171,8 @@ async def test_delete_enum_option_in_use(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, account_session=session_admin
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=session_admin)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
diff --git a/backend/tests/unit/graphql/mutations/test_update_generic.py b/backend/tests/unit/graphql/mutations/test_update_generic.py
index 25b1833337..6108c78f7a 100644
--- a/backend/tests/unit/graphql/mutations/test_update_generic.py
+++ b/backend/tests/unit/graphql/mutations/test_update_generic.py
@@ -5,7 +5,7 @@
 from tests.helpers.graphql import graphql
 
 
-async def test_display_label_generic(db: InfrahubDatabase, animal_person_schema, branch: Branch):
+async def test_display_label_generic(db: InfrahubDatabase, animal_person_schema, branch: Branch) -> None:
     person_schema = animal_person_schema.get(name="TestPerson")
     dog_schema = animal_person_schema.get(name="TestDog")
     cat_schema = animal_person_schema.get(name="TestCat")
@@ -35,7 +35,8 @@ async def test_display_label_generic(db: InfrahubDatabase, animal_person_schema,
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -45,5 +46,6 @@ async def test_display_label_generic(db: InfrahubDatabase, animal_person_schema,
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestAnimalUpdate"]["ok"] is True
     assert result.data["TestAnimalUpdate"]["object"]["weight"]["value"]
diff --git a/backend/tests/unit/graphql/mutations/test_webhook.py b/backend/tests/unit/graphql/mutations/test_webhook.py
index 9e19c07bcd..f0bdeba047 100644
--- a/backend/tests/unit/graphql/mutations/test_webhook.py
+++ b/backend/tests/unit/graphql/mutations/test_webhook.py
@@ -17,7 +17,8 @@ async def test_create_webhook_invalid_node(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -32,7 +33,8 @@ async def test_create_webhook_invalid_node_event(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -47,7 +49,8 @@ async def test_create_webhook_with_node_kind_and_valid_node_event(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -71,7 +74,8 @@ async def test_update_webhook_with_optional_node_kind(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -105,7 +109,8 @@ async def test_create_webhook_with_node_kind_and_all_events(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -137,7 +142,8 @@ async def test_update_to_invalid_states(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -179,7 +185,8 @@ async def test_update_to_valid_states(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -216,7 +223,8 @@ async def test_update_description_only(
     db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch
 ) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=CREATE_WEBHOOK,
@@ -254,7 +262,8 @@
 
 async def test_upsert_webhook(db: InfrahubDatabase, register_core_models_schema: None, default_branch: Branch) -> None:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=UPSERT_WEBHOOK,
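Several of these hunks also add `assert result.data` right next to the existing error check. graphql-core types `ExecutionResult.data` as optional, so under strict type checking the subscripts on the following lines need the assertion to narrow the type; it also fails the test early with a clearer signal when a query returns nothing. A minimal illustration of the pattern (the helper name is ours, not from the diff):

from graphql import ExecutionResult

def unwrap(result: ExecutionResult) -> dict:
    # data is Optional[dict]; the assert both guards the test and
    # narrows the type before any subscripting below.
    assert result.errors is None
    assert result.data
    return result.data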
diff --git a/backend/tests/unit/graphql/profiles/test_mutation_create.py b/backend/tests/unit/graphql/profiles/test_mutation_create.py
index c26544a6f2..7f75a38ac1 100644
--- a/backend/tests/unit/graphql/profiles/test_mutation_create.py
+++ b/backend/tests/unit/graphql/profiles/test_mutation_create.py
@@ -21,7 +21,8 @@ async def test_create_profile(db: InfrahubDatabase, default_branch, car_person_s
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     # gql mutation needs function workflow
     gql_params.context.service = await InfrahubServices.new(workflow=WorkflowLocalExecution())
     result = await graphql(
diff --git a/backend/tests/unit/graphql/profiles/test_query.py b/backend/tests/unit/graphql/profiles/test_query.py
index 4b8e9cee4c..ea9bcec39a 100644
--- a/backend/tests/unit/graphql/profiles/test_query.py
+++ b/backend/tests/unit/graphql/profiles/test_query.py
@@ -75,9 +75,8 @@ async def test_create_profile_in_schema(db: InfrahubDatabase, default_branch: Br
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -87,6 +86,7 @@ async def test_create_profile_in_schema(db: InfrahubDatabase, default_branch: Br
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["ProfileTestCriticality"]["edges"]) == 1
     assert result.data["ProfileTestCriticality"]["edges"][0]["node"]["display_label"] == obj1.profile_name.value
@@ -116,7 +116,8 @@ async def test_upsert_profile_in_schema(db: InfrahubDatabase, default_branch: Br
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     # gql mutation needs function workflow
     gql_params.context.service = await InfrahubServices.new(workflow=WorkflowLocalExecution())
     result = await graphql(
@@ -128,6 +129,7 @@ async def test_upsert_profile_in_schema(db: InfrahubDatabase, default_branch: Br
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["ProfileTestCriticalityUpsert"]["ok"] is True
     gql_object = result.data["ProfileTestCriticalityUpsert"]["object"]
     assert gql_object["profile_name"]["value"] == "prof1"
@@ -182,9 +184,8 @@ async def test_profile_apply(db: InfrahubDatabase, default_branch: Branch, criti
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -194,6 +195,7 @@ async def test_profile_apply(db: InfrahubDatabase, default_branch: Branch, criti
     )
 
     assert result.errors is None
+    assert result.data
     crits = result.data["TestCriticality"]["edges"]
     assert len(crits) == 2
     assert {
@@ -259,9 +261,8 @@ async def test_profile_apply_generic(db: InfrahubDatabase, default_branch: Branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -271,6 +272,7 @@ async def test_profile_apply_generic(db: InfrahubDatabase, default_branch: Branc
     )
 
     assert result.errors is None
+    assert result.data
     crits = result.data["TestGenericCriticality"]["edges"]
     assert len(crits) == 2
     assert {
@@ -327,9 +329,8 @@ async def test_setting_illegal_profiles_raises_error(db: InfrahubDatabase, defau
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=True, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     crit_schema.generate_profile = False
     result = await graphql(
@@ -424,9 +425,8 @@ async def test_is_from_profile_set_correctly(db: InfrahubDatabase, default_branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -436,6 +436,7 @@ async def test_is_from_profile_set_correctly(db: InfrahubDatabase, default_branc
     )
 
     assert result.errors is None
+    assert result.data
     crits = result.data["TestCriticality"]["edges"]
     assert len(crits) == 3
     crits_by_id = {crit["node"]["id"]: crit["node"] for crit in crits}
@@ -530,9 +531,8 @@ async def test_is_profile_source_set_correctly(db: InfrahubDatabase, default_bra
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -542,6 +542,7 @@ async def test_is_profile_source_set_correctly(db: InfrahubDatabase, default_bra
     )
 
     assert result.errors is None
+    assert result.data
     crits = result.data["TestCriticality"]["edges"]
     assert len(crits) == 3
     crits_by_id = {crit["node"]["id"]: crit["node"] for crit in crits}
diff --git a/backend/tests/unit/graphql/queries/test_branch.py b/backend/tests/unit/graphql/queries/test_branch.py
index 044c8b99a4..f6bfd2c082 100644
--- a/backend/tests/unit/graphql/queries/test_branch.py
+++ b/backend/tests/unit/graphql/queries/test_branch.py
@@ -28,10 +28,9 @@ async def test_branch_query(
         }
     }
     """
-
+    default_branch.update_schema_hash()
     gql_params = await prepare_graphql_params(
         db=db,
-        include_subscription=False,
         branch=default_branch,
         account_session=session_admin,
         service=service,
@@ -61,9 +60,7 @@ async def test_branch_query(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, service=service
-    )
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, service=service)
     all_branches = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -108,9 +105,7 @@ async def test_branch_query(
         }
     }
     """ % branch3["name"]
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, service=service
-    )
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, service=service)
     name_response = await graphql(
         schema=gql_params.schema,
         source=name_query,
@@ -134,9 +129,7 @@ async def test_branch_query(
     """ % [branch3["id"]]
     id_query = id_query.replace("'", '"')
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=default_branch, service=service
-    )
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch, service=service)
     id_response = await graphql(
         schema=gql_params.schema,
         source=id_query,
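Note that the `include_mutation=`/`include_subscription=` arguments disappear from every call site while `db`, `branch`, and the optional `account_session`/`service` arguments remain. A sketch of the call shape the tests now rely on; the signature here is inferred from usage in this diff, not taken from the function's definition:

gql_params = await prepare_graphql_params(
    db=db,                          # InfrahubDatabase handle
    branch=default_branch,          # a Branch object (branch.name is also accepted in places below)
    account_session=session_admin,  # optional authentication context
)
schema = gql_params.schema    # generated GraphQL schema
context = gql_params.context  # execution context handed to graphql()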
diff --git a/backend/tests/unit/graphql/queries/test_display_label.py b/backend/tests/unit/graphql/queries/test_display_label.py
index ae2acb9d6a..47dd2ea457 100644
--- a/backend/tests/unit/graphql/queries/test_display_label.py
+++ b/backend/tests/unit/graphql/queries/test_display_label.py
@@ -43,9 +43,8 @@ async def test_display_label_one_item(db: InfrahubDatabase, default_branch: Bran
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -55,6 +54,7 @@ async def test_display_label_one_item(db: InfrahubDatabase, default_branch: Bran
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCriticality"]["edges"]) == 1
     assert result.data["TestCriticality"]["edges"][0]["node"]["display_label"] == "Low"
@@ -95,9 +95,8 @@ async def test_display_label_multiple_items(db: InfrahubDatabase, default_branch
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -107,6 +106,7 @@ async def test_display_label_multiple_items(db: InfrahubDatabase, default_branch
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCriticality"]["edges"]) == 2
     assert sorted([node["node"]["display_label"] for node in result.data["TestCriticality"]["edges"]]) == [
         "low 4",
@@ -146,9 +146,8 @@ async def test_display_label_default_value(db: InfrahubDatabase, default_branch:
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -158,6 +157,7 @@ async def test_display_label_default_value(db: InfrahubDatabase, default_branch:
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCriticality"]["edges"]) == 1
     assert result.data["TestCriticality"]["edges"][0]["node"]["display_label"] == f"TestCriticality(ID: {obj1.id})"
@@ -190,9 +190,8 @@ async def test_display_label_generic(db: InfrahubDatabase, default_branch: Branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -202,6 +201,7 @@ async def test_display_label_generic(db: InfrahubDatabase, default_branch: Branc
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestAnimal"]["edges"]) == 2
     expected_results = ["Kitty Persian #444444", "Rocky Labrador"]
     assert sorted([item["node"]["display_label"] for item in result.data["TestAnimal"]["edges"]]) == expected_results
@@ -257,9 +257,8 @@ async def test_display_label_nested_query(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -379,9 +378,8 @@ async def test_display_label_computed_attr(db: InfrahubDatabase, default_branch:
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -391,6 +389,7 @@ async def test_display_label_computed_attr(db: InfrahubDatabase, default_branch:
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestObjectA"]["edges"]) == 1
     assert result.data["TestObjectA"]["edges"][0]["node"]["display_label"] == "FIRST"
@@ -407,9 +406,8 @@ async def test_display_label_computed_attr(db: InfrahubDatabase, default_branch:
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -419,5 +417,6 @@ async def test_display_label_computed_attr(db: InfrahubDatabase, default_branch:
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestObjectB"]["edges"]) == 1
     assert result.data["TestObjectB"]["edges"][0]["node"]["display_label"] == "first SECOND"
diff --git a/backend/tests/unit/graphql/queries/test_event.py b/backend/tests/unit/graphql/queries/test_event.py
index dc3278de54..79a0f5dd53 100644
--- a/backend/tests/unit/graphql/queries/test_event.py
+++ b/backend/tests/unit/graphql/queries/test_event.py
@@ -413,7 +413,8 @@ async def prefect_client(prefect_test_fixture):
 
 
 async def run_query(db: InfrahubDatabase, branch: Branch, query: str, variables: dict[str, Any]) -> ExecutionResult:
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     return await graphql(
         schema=gql_params.schema,
         source=query,
diff --git a/backend/tests/unit/graphql/queries/test_ipam.py b/backend/tests/unit/graphql/queries/test_ipam.py
index 292bb333e6..538fc6772f 100644
--- a/backend/tests/unit/graphql/queries/test_ipam.py
+++ b/backend/tests/unit/graphql/queries/test_ipam.py
@@ -130,7 +130,8 @@ async def test_ipprefix_nextavailable(
 ):
     obj = ip_dataset_01[prefix]
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     query = """
     query($prefix: String!, $prefix_length: Int) {
@@ -171,7 +172,8 @@ async def test_ipaddress_nextavailable(
 ):
     obj = ip_dataset_02[prefix]
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     query = """
     query($prefix: String!, $prefix_length: Int) {
@@ -523,7 +525,8 @@ async def test_ipaddress_include_available(
     prefix: str,
     result: list[str],
 ):
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     query = """
     query($prefix: ID!) {
@@ -574,7 +577,8 @@ async def test_ip_address_include_available_filtered_by_kind(
     limit: int,
     kinds: list[str],
 ):
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     query = """
     query($prefix: ID!, $limit: Int!, $kinds: [String!]) {
         BuiltinIPAddress(ip_prefix__ids: [$prefix], include_available: true, kinds: $kinds, limit: $limit) {
@@ -628,7 +632,8 @@ async def test_ip_address_include_available_filtered_by_kind_invalid(
     register_ipam_schema: SchemaBranch,
     ip_dataset_range_various_kinds: dict[str, Node],
 ):
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     query = """
     query($prefix: ID!, $kinds: [String!]) {
         BuiltinIPAddress(ip_prefix__ids: [$prefix], include_available: true, kinds: $kinds) {
@@ -733,7 +738,8 @@ async def test_ipaddress_include_available_pagination(
     offset: int,
     result: list[str],
 ):
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     query = """
     query($prefix: ID!, $limit: Int!, $offset: Int!) {
@@ -879,7 +885,8 @@ async def test_ipprefix_include_available(
     prefix: str,
     result: list[str],
 ):
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     query = """
     query($prefix: ID!) {
@@ -1040,7 +1047,8 @@ async def test_ipprefix_include_available_pagination(
     offset: int,
     result: list[str],
 ):
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     query = """
     query($prefix: ID!, $limit: Int!, $offset: Int!) {
diff --git a/backend/tests/unit/graphql/queries/test_relationship.py b/backend/tests/unit/graphql/queries/test_relationship.py
index d7d846a005..b4f70306f0 100644
--- a/backend/tests/unit/graphql/queries/test_relationship.py
+++ b/backend/tests/unit/graphql/queries/test_relationship.py
@@ -33,8 +33,8 @@ async def test_relationship(
         }
     }
     """
-
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
 
     # No identifiers
     result = await graphql(
diff --git a/backend/tests/unit/graphql/queries/test_resource_pool.py b/backend/tests/unit/graphql/queries/test_resource_pool.py
index ffd95fb081..ac780ab885 100644
--- a/backend/tests/unit/graphql/queries/test_resource_pool.py
+++ b/backend/tests/unit/graphql/queries/test_resource_pool.py
@@ -187,7 +187,8 @@ async def test_create_ipv6_prefix_and_read_allocations(db: InfrahubDatabase, def
     ipv6_prefix_resource = prefix_pools_02["ipv6_prefix_resource"]
     ipv6_prefix_pool = prefix_pools_02["ipv6_prefix_pool"]
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     site1_result = await graphql(
         schema=gql_params.schema,
@@ -257,7 +258,8 @@ async def test_create_ipv6_prefix_and_read_allocations(db: InfrahubDatabase, def
     # ------------------------------------------------------------
     # Validate the utilization query in the main Branch
     # ------------------------------------------------------------
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     utilization_result = await graphql(
         schema=gql_params.schema,
         source=POOL_UTILIZATION,
@@ -293,7 +295,8 @@ async def test_create_ipv4_prefix_and_read_allocations(db: InfrahubDatabase, def
     ipv4_prefix_resource = prefix_pools_02["ipv4_prefix_resource"]
     ipv4_prefix_pool = prefix_pools_02["ipv4_prefix_pool"]
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     site1_result = await graphql(
         schema=gql_params.schema,
@@ -364,7 +367,8 @@ async def test_create_ipv4_address_and_read_allocations(db: InfrahubDatabase, de
     ipv4_address_resource = prefix_pools_02["ipv4_address_resource"]
     ipv4_address_pool = prefix_pools_02["ipv4_address_pool"]
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
 
     device1_result = await graphql(
         schema=gql_params.schema,
@@ -434,7 +438,8 @@ async def test_read_resources_in_pool_with_branch(db: InfrahubDatabase, default_
     peers = await ipv4_address_pool.resources.get_peers(db=db)
 
     # At first there should be 1 resource
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     resources_result = await graphql(
         schema=gql_params.schema,
         source=RESOURCES,
@@ -464,7 +469,8 @@ async def test_read_resources_in_pool_with_branch(db: InfrahubDatabase, default_
     branched_peer_ids = [peer.id for peer in branched_peers.values()]
 
     # In main there should be 1 resource
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     resources_result = await graphql(
         schema=gql_params.schema,
         source=RESOURCES,
@@ -483,7 +489,8 @@ async def test_read_resources_in_pool_with_branch(db: InfrahubDatabase, default_
     } == set(branched_peer_ids)
 
     # In branch there should be 2 resources
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch.name)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch.name)
     resources_result = await graphql(
         schema=gql_params.schema,
         source=RESOURCES,
@@ -568,7 +575,8 @@ async def test_read_resources_in_pool_new_schema_in_branch(
     }
     """
    # At first there should be 1 resource
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch2)
+    branch2.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch2)
     resources_result = await graphql(
         schema=gql_params.schema,
         source=IP_PREFIX_RESOURCES,
@@ -608,7 +616,8 @@ async def test_read_resources_in_pool_new_schema_in_branch(
     # ------------------------------------------------------------
     # Validate the utilization query in the main Branch
     # ------------------------------------------------------------
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     utilization_result = await graphql(
         schema=gql_params.schema,
         source=POOL_UTILIZATION,
@@ -631,7 +640,8 @@ async def test_read_resources_in_pool_new_schema_in_branch(
     # ------------------------------------------------------------
     # Validate the utilization query in Branch2
     # ------------------------------------------------------------
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch2)
+    branch2.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch2)
     utilization_result = await graphql(
         schema=gql_params.schema,
         source=POOL_UTILIZATION,
@@ -676,7 +686,8 @@ async def test_read_resources_in_pool_with_branch_with_mutations(
     peer_id = peer_ids[0]
 
     # At first there should be 1 resource
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     resources_result = await graphql(
         schema=gql_params.schema,
         source=RESOURCES,
@@ -693,7 +704,8 @@ async def test_read_resources_in_pool_with_branch_with_mutations(
     branch = await create_branch(branch_name="issue-3579", db=db)
 
     # Create a prefix
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     prefix_result = await graphql(
         schema=gql_params.schema,
         source="""
@@ -730,7 +742,8 @@ async def test_read_resources_in_pool_with_branch_with_mutations(
     )
 
     # In main there should be 1 resource
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     resources_result = await graphql(
         schema=gql_params.schema,
         source=RESOURCES,
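Where a test queries a branch other than the default one, the hash refresh moves with it: `branch.update_schema_hash()` or `branch2.update_schema_hash()` is called right before `prepare_graphql_params` for that branch. A condensed sketch of the branch-switching pattern used in the resource-pool tests above:

branch = await create_branch(branch_name="issue-3579", db=db)

# Query in the new branch: refresh that branch's own schema hash first.
branch.update_schema_hash()
gql_params = await prepare_graphql_params(db=db, branch=branch)

# Query in main again: the default branch gets its own refresh.
default_branch.update_schema_hash()
gql_params = await prepare_graphql_params(db=db, branch=default_branch)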
""" - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result_address = await graphql( schema=gql_params.schema, @@ -301,12 +306,12 @@ async def test_search_groups( register_core_models_schema, register_builtin_models_schema, car_person_data_generic, -): +) -> None: group1 = await Node.init(db=db, schema=InfrahubKind.STANDARDGROUP) await group1.new(db=db, name="group1", members=[car_person_data_generic["c1"], car_person_data_generic["c2"]]) await group1.save(db=db) - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, @@ -328,7 +333,8 @@ async def test_search_anywhere_by_string_no_results( register_builtin_models_schema: None, ) -> None: """Validate that the GraphQL an empty result is returned as an empty array and not a `null` value""" - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, diff --git a/backend/tests/unit/graphql/queries/test_task.py b/backend/tests/unit/graphql/queries/test_task.py index c4845a86d5..bb786ca7f6 100644 --- a/backend/tests/unit/graphql/queries/test_task.py +++ b/backend/tests/unit/graphql/queries/test_task.py @@ -230,7 +230,8 @@ async def flow_runs_data(prefect_client: PrefectClient, tag_blue, tag_red, accou async def run_query(db: InfrahubDatabase, branch: Branch, query: str, variables: dict[str, Any]) -> ExecutionResult: - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) return await graphql( schema=gql_params.schema, source=query, diff --git a/backend/tests/unit/graphql/test_core_account.py b/backend/tests/unit/graphql/test_core_account.py index a4497eeb81..743f552063 100644 --- a/backend/tests/unit/graphql/test_core_account.py +++ b/backend/tests/unit/graphql/test_core_account.py @@ -10,7 +10,7 @@ from tests.helpers.graphql import graphql -async def test_everyone_can_update_password(db: InfrahubDatabase, default_branch: Branch, first_account): +async def test_everyone_can_update_password(db: InfrahubDatabase, default_branch: Branch, first_account) -> None: new_password = "NewP@ssw0rd" new_description = "what a cool description" query = """ @@ -21,9 +21,9 @@ async def test_everyone_can_update_password(db: InfrahubDatabase, default_branch } """ % (new_password, new_description) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=AccountSession(authenticated=True, account_id=first_account.id, auth_type=AuthType.JWT), ) @@ -52,7 +52,7 @@ async def test_permissions( authentication_base, session_admin, first_account, -): +) -> None: query = """ query { InfrahubPermissions { @@ -74,9 +74,8 @@ async def test_permissions( } """ - gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, account_session=session_admin - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, 
branch=default_branch, account_session=session_admin) result = await graphql( schema=gql_params.schema, source=query, context_value=gql_params.context, root_value=None, variable_values={} @@ -100,7 +99,6 @@ async def test_permissions( gql_params = await prepare_graphql_params( db=db, - include_subscription=False, branch=default_branch, account_session=AccountSession(authenticated=True, account_id=first_account.id, auth_type=AuthType.JWT), ) diff --git a/backend/tests/unit/graphql/test_graphql_query.py b/backend/tests/unit/graphql/test_graphql_query.py index 7eaaf9c1f6..88764c3d51 100644 --- a/backend/tests/unit/graphql/test_graphql_query.py +++ b/backend/tests/unit/graphql/test_graphql_query.py @@ -20,7 +20,7 @@ from tests.helpers.graphql import graphql -async def test_info_query(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): +async def test_info_query(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema) -> None: query = """ query { InfrahubInfo { @@ -28,10 +28,8 @@ async def test_info_query(db: InfrahubDatabase, default_branch: Branch, critical } } """ - - params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=params.schema, source=query, @@ -41,10 +39,11 @@ async def test_info_query(db: InfrahubDatabase, default_branch: Branch, critical ) assert result.errors is None + assert result.data assert result.data["InfrahubInfo"]["version"] == __version__ -async def test_simple_query(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): +async def test_simple_query(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema) -> None: obj1 = await Node.init(db=db, schema=criticality_schema) await obj1.new(db=db, name="low", level=4) await obj1.save(db=db) @@ -66,10 +65,8 @@ async def test_simple_query(db: InfrahubDatabase, default_branch: Branch, critic } } """ - - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -79,6 +76,7 @@ async def test_simple_query(db: InfrahubDatabase, default_branch: Branch, critic ) assert result.errors is None + assert result.data assert result.data["TestCriticality"]["count"] == 2 assert len(result.data["TestCriticality"]["edges"]) == 2 assert gql_params.context.related_node_ids == {obj1.id, obj2.id} @@ -86,7 +84,7 @@ async def test_simple_query(db: InfrahubDatabase, default_branch: Branch, critic async def test_simple_query_with_offset_and_limit( db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema -): +) -> None: obj1 = await Node.init(db=db, schema=criticality_schema) await obj1.new(db=db, name="low", level=4) await obj1.save(db=db) @@ -108,9 +106,8 @@ async def test_simple_query_with_offset_and_limit( } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -120,11 +117,12 @@ async def test_simple_query_with_offset_and_limit( 
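The last file in this series also swaps untyped schema lookups for typed ones: `animal_person_schema.get(...)` becomes `get_node(...)` and `registry.schema.get(...)` becomes `get_node_schema(...)`, so the returned object is a node schema specifically rather than a broader union of schema kinds. A minimal sketch of the difference, assuming the accessor semantics implied by their usage in this diff:

# Before: get() can return any schema kind, so downstream attribute
# access needs casts or ignores under strict type checking.
car = registry.schema.get(name="TestCar")

# After: the typed accessors return (or assert) a NodeSchema.
car = registry.schema.get_node_schema(name="TestCar")
person = animal_person_schema.get_node(name="TestPerson")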
) assert result.errors is None + assert result.data assert result.data["TestCriticality"]["count"] == 2 assert len(result.data["TestCriticality"]["edges"]) == 1 -async def test_display_hfid(db: InfrahubDatabase, default_branch: Branch, animal_person_schema: SchemaBranch): +async def test_display_hfid(db: InfrahubDatabase, default_branch: Branch, animal_person_schema: SchemaBranch) -> None: person_schema = animal_person_schema.get(name="TestPerson") dog_schema = animal_person_schema.get(name="TestDog") @@ -149,9 +147,8 @@ async def test_display_hfid(db: InfrahubDatabase, default_branch: Branch, animal } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -161,6 +158,7 @@ async def test_display_hfid(db: InfrahubDatabase, default_branch: Branch, animal ) assert result.errors is None + assert result.data assert len(result.data["TestDog"]["edges"]) == 1 assert result.data["TestDog"]["edges"][0] == { "node": { @@ -173,9 +171,9 @@ async def test_display_hfid(db: InfrahubDatabase, default_branch: Branch, animal async def test_display_hfid_related_node( db: InfrahubDatabase, default_branch: Branch, animal_person_schema: SchemaBranch -): - person_schema = animal_person_schema.get(name="TestPerson") - dog_schema = animal_person_schema.get(name="TestDog") +) -> None: + person_schema = animal_person_schema.get_node(name="TestPerson") + dog_schema = animal_person_schema.get_node(name="TestDog") person1 = await Node.init(db=db, schema=person_schema, branch=default_branch) await person1.new(db=db, name="Jack") @@ -203,9 +201,8 @@ async def test_display_hfid_related_node( } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -215,6 +212,7 @@ async def test_display_hfid_related_node( ) assert result.errors is None + assert result.data assert len(result.data["TestPerson"]["edges"]) == 1 assert result.data["TestPerson"]["edges"][0] == { "node": { @@ -226,7 +224,7 @@ async def test_display_hfid_related_node( async def test_all_attributes( db: InfrahubDatabase, default_branch: Branch, data_schema: None, all_attribute_types_schema: NodeSchema -): +) -> None: obj1 = await Node.init(db=db, schema="TestAllAttributeTypes") await obj1.new( db=db, @@ -271,9 +269,8 @@ async def test_all_attributes( } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -283,6 +280,7 @@ async def test_all_attributes( ) assert result.errors is None + assert result.data assert len(result.data["TestAllAttributeTypes"]["edges"]) == 2 results = {item["node"]["name"]["value"]: item["node"] for item in result.data["TestAllAttributeTypes"]["edges"]} @@ -312,9 +310,9 @@ async def test_all_attributes( assert results["obj2"]["prefix"]["prefixlen"] is None -async def test_nested_query(db: InfrahubDatabase, default_branch: Branch, car_person_schema: SchemaBranch): - car 
= registry.schema.get(name="TestCar") - person = registry.schema.get(name="TestPerson") +async def test_nested_query(db: InfrahubDatabase, default_branch: Branch, car_person_schema: SchemaBranch) -> None: + car = registry.schema.get_node_schema(name="TestCar") + person = registry.schema.get_node_schema(name="TestPerson") p1 = await Node.init(db=db, schema=person) await p1.new(db=db, name="John", height=180) @@ -356,9 +354,8 @@ async def test_nested_query(db: InfrahubDatabase, default_branch: Branch, car_pe } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -369,6 +366,7 @@ async def test_nested_query(db: InfrahubDatabase, default_branch: Branch, car_pe assert result.errors is None + assert result.data result_per_name = {result["node"]["name"]["value"]: result["node"] for result in result.data["TestPerson"]["edges"]} assert sorted(result_per_name.keys()) == ["Jane", "John"] assert len(result_per_name["John"]["cars"]["edges"]) == 2 @@ -376,9 +374,11 @@ async def test_nested_query(db: InfrahubDatabase, default_branch: Branch, car_pe assert gql_params.context.related_node_ids == {p1.id, p2.id, c1.id, c2.id, c3.id} -async def test_double_nested_query(db: InfrahubDatabase, default_branch: Branch, car_person_schema: SchemaBranch): - car = registry.schema.get(name="TestCar") - person = registry.schema.get(name="TestPerson") +async def test_double_nested_query( + db: InfrahubDatabase, default_branch: Branch, car_person_schema: SchemaBranch +) -> None: + car = registry.schema.get_node_schema(name="TestCar") + person = registry.schema.get_node_schema(name="TestPerson") p1 = await Node.init(db=db, schema=person) await p1.new(db=db, name="John", height=180) @@ -427,9 +427,8 @@ async def test_double_nested_query(db: InfrahubDatabase, default_branch: Branch, } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -440,6 +439,7 @@ async def test_double_nested_query(db: InfrahubDatabase, default_branch: Branch, assert result.errors is None + assert result.data result_per_name = {result["node"]["name"]["value"]: result["node"] for result in result.data["TestPerson"]["edges"]} assert sorted(result_per_name.keys()) == ["Jane", "John"] assert len(result_per_name["John"]["cars"]["edges"]) == 2 @@ -453,7 +453,7 @@ async def test_double_nested_query(db: InfrahubDatabase, default_branch: Branch, async def test_nested_query_single_relationship( db: InfrahubDatabase, default_branch: Branch, node_group_schema, data_schema -): +) -> None: raw_schema = { "version": "1.0", "generics": [ @@ -533,9 +533,8 @@ async def test_nested_query_single_relationship( } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -545,7 +544,7 @@ async def test_nested_query_single_relationship( ) assert result.errors is None - + assert result.data result_per_name 
= { result["node"]["name"]["value"]: result["node"] for result in result.data["InfraDevice"]["edges"] } @@ -640,9 +639,8 @@ async def test_nested_generic_query_many_relationship( } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -672,9 +670,9 @@ async def test_nested_generic_query_many_relationship( } -async def test_query_typename(db: InfrahubDatabase, default_branch: Branch, car_person_schema: SchemaBranch): - car = registry.schema.get(name="TestCar") - person = registry.schema.get(name="TestPerson") +async def test_query_typename(db: InfrahubDatabase, default_branch: Branch, car_person_schema: SchemaBranch) -> None: + car = registry.schema.get_node_schema(name="TestCar") + person = registry.schema.get_node_schema(name="TestPerson") p1 = await Node.init(db=db, schema=person) await p1.new(db=db, name="John", height=180) @@ -736,9 +734,8 @@ async def test_query_typename(db: InfrahubDatabase, default_branch: Branch, car_ } } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -747,6 +744,7 @@ async def test_query_typename(db: InfrahubDatabase, default_branch: Branch, car_ variable_values={}, ) + assert result.data assert result.errors is None result_per_name = {result["node"]["name"]["value"]: result["node"] for result in result.data["TestPerson"]["edges"]} @@ -761,7 +759,7 @@ async def test_query_typename(db: InfrahubDatabase, default_branch: Branch, car_ assert result_per_name["John"]["cars"]["edges"][0]["properties"]["__typename"] == "RelationshipProperty" -async def test_query_filter_ids(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): +async def test_query_filter_ids(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema) -> None: obj1 = await Node.init(db=db, schema=criticality_schema) await obj1.new(db=db, name="low", level=4) await obj1.save(db=db) @@ -788,9 +786,8 @@ async def test_query_filter_ids(db: InfrahubDatabase, default_branch: Branch, cr """ % obj1.id ) - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -800,6 +797,7 @@ async def test_query_filter_ids(db: InfrahubDatabase, default_branch: Branch, cr ) assert result.errors is None + assert result.data assert len(result.data["TestCriticality"]["edges"]) == 1 query = """ @@ -818,9 +816,8 @@ async def test_query_filter_ids(db: InfrahubDatabase, default_branch: Branch, cr obj1.id, obj2.id, ) - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -830,6 +827,7 @@ async def test_query_filter_ids(db: InfrahubDatabase, default_branch: Branch, cr ) 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCriticality"]["edges"]) == 2
@@ -841,7 +839,7 @@ async def test_query_filter_relationship_isnull(
     person_jane_main: Node,
     car_camry_main: Node,
     car_accord_main: Node,
-):
+) -> None:
     query = """
     query {
         TestPerson(cars__isnull: true) {
             count
             edges {
                 node {
                     id
                 }
             }
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -866,6 +863,7 @@ async def test_query_filter_relationship_isnull(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPerson"]["count"] == 1
     assert len(result.data["TestPerson"]["edges"]) == 1
     assert result.data["TestPerson"]["edges"][0]["node"]["id"] == person_albert_main.id
@@ -891,6 +889,7 @@ async def test_query_filter_relationship_isnull(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPerson"]["count"] == 2
     assert len(result.data["TestPerson"]["edges"]) == 2
     result_person_ids = {node["node"]["id"] for node in result.data["TestPerson"]["edges"]}
@@ -922,9 +921,8 @@ async def test_query_filter_attribute_isnull(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -934,6 +932,7 @@ async def test_query_filter_attribute_isnull(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPerson"]["count"] == 1
     assert len(result.data["TestPerson"]["edges"]) == 1
     assert result.data["TestPerson"]["edges"][0]["node"]["id"] == person_albert_main.id
@@ -959,13 +958,16 @@ async def test_query_filter_attribute_isnull(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPerson"]["count"] == 2
     assert len(result.data["TestPerson"]["edges"]) == 2
     result_person_ids = {node["node"]["id"] for node in result.data["TestPerson"]["edges"]}
     assert result_person_ids == {person_john_main.id, person_jane_main.id}
 
 
-async def test_query_filter_local_attrs(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema):
+async def test_query_filter_local_attrs(
+    db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema
+) -> None:
     obj1 = await Node.init(db=db, schema=criticality_schema)
     await obj1.new(db=db, name="low", level=4)
     await obj1.save(db=db)
@@ -986,9 +988,8 @@ async def test_query_filter_local_attrs(db: InfrahubDatabase, default_branch: Br
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -998,6 +999,7 @@ async def test_query_filter_local_attrs(db: InfrahubDatabase, default_branch: Br
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCriticality"]["edges"]) == 1
@@ -1009,7 +1011,8 @@ async def test_query_filter_on_enum(
     car_person_schema: SchemaBranch,
     graphql_enums_on: bool,
     enum_value: Literal["MANUAL", '"manual"'],
-):
+    reset_graphql_schema_between_tests: None,
+) -> None:
     config.SETTINGS.experimental_features.graphql_enums = graphql_enums_on
 
     car = registry.schema.get(name="TestCar")
@@ -1030,10 +1033,8 @@ async def test_query_filter_on_enum(
         }
     }
     """ % (enum_value)
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1043,6 +1044,7 @@ async def test_query_filter_on_enum(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCar"]["edges"]) == 1
     assert result.data["TestCar"]["edges"][0]["node"]["name"]["value"] == "GoKart"
@@ -1092,9 +1094,8 @@ async def test_query_multiple_filters(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query01,
@@ -1104,6 +1105,7 @@ async def test_query_multiple_filters(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCar"]["edges"]) == 1
     assert result.data["TestCar"]["edges"][0]["node"]["id"] == c1.id
@@ -1121,9 +1123,8 @@ async def test_query_multiple_filters(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query02,
@@ -1133,6 +1134,7 @@ async def test_query_multiple_filters(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCar"]["edges"]) == 1
     assert result.data["TestCar"]["edges"][0]["node"]["id"] == c3.id
@@ -1150,9 +1152,8 @@ async def test_query_multiple_filters(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query03,
@@ -1162,6 +1163,7 @@ async def test_query_multiple_filters(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCar"]["edges"]) == 1
     assert result.data["TestCar"]["edges"][0]["node"]["id"] == c2.id
@@ -1182,9 +1184,8 @@ async def test_query_multiple_filters(
         p1.id,
         m2.id,
     )
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query04,
@@ -1194,6 +1195,7 @@ async def test_query_multiple_filters(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCar"]["edges"]) == 1
     assert result.data["TestCar"]["edges"][0]["node"]["id"] == c2.id
@@ -1227,9 +1229,8 @@ async def test_query_multiple_filters(
         }
     }
     """ % (p1.id)
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query05,
@@ -1239,6 +1240,7 @@ async def test_query_multiple_filters(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestCar"]["edges"]) == 2
     assert {node["node"]["id"] for node in result.data["TestCar"]["edges"]} == {c1.id, c2.id}
@@ -1290,9 +1292,8 @@ async def test_query_filter_relationships(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1302,6 +1303,7 @@ async def test_query_filter_relationships(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestPerson"]["edges"]) == 1
     assert result.data["TestPerson"]["count"] == 1
     assert result.data["TestPerson"]["edges"][0]["node"]["name"]["value"] == "John"
@@ -1336,9 +1338,8 @@ async def test_query_filter_relationships_with_generic(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1348,6 +1349,7 @@ async def test_query_filter_relationships_with_generic(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestPerson"]["edges"]) == 1
     assert result.data["TestPerson"]["edges"][0]["node"]["name"]["value"] == "John"
     assert len(result.data["TestPerson"]["edges"][0]["node"]["cars"]["edges"]) == 1
@@ -1380,9 +1382,8 @@ async def test_query_filter_relationships_with_generic_filter(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1401,6 +1402,7 @@ async def test_query_filter_relationships_with_generic_filter(
             }
         }
     ]
+    assert result.data
     assert DeepDiff(result.data["TestPerson"]["edges"], expected_results, ignore_order=True).to_dict() == {}
@@ -1455,9 +1457,8 @@ async def test_query_filter_relationship_id(
        """
        % c1.id
    )
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1467,6 +1468,7 @@ async def test_query_filter_relationship_id(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestPerson"]["edges"]) == 1
     assert result.data["TestPerson"]["edges"][0]["node"]["name"]["value"] == "John"
     assert len(result.data["TestPerson"]["edges"][0]["node"]["cars"]["edges"]) == 1
@@ -1497,9 +1499,8 @@ async def test_query_filter_relationship_id(
         c1.id,
         c4.id,
     )
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1509,6 +1510,7 @@ async def test_query_filter_relationship_id(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestPerson"]["edges"]) == 1
     assert result.data["TestPerson"]["edges"][0]["node"]["name"]["value"] == "John"
     assert len(result.data["TestPerson"]["edges"][0]["node"]["cars"]["edges"]) == 2
@@ -1564,10 +1566,8 @@ async def test_query_filter_list(
         }
     }
     """ % {"filter": graphql_filter}
-
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1577,6 +1577,7 @@ async def test_query_filter_list(
     )
 
     assert result.errors is None
+    assert result.data
     names = sorted([item["node"]["name"]["value"] for item in result.data["TestCriticality"]["edges"]])
     assert names == expected_results
@@ -1601,9 +1602,8 @@ async def test_query_attribute_multiple_values(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1613,6 +1613,7 @@ async def test_query_attribute_multiple_values(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPerson"]["count"] == 2
@@ -1665,9 +1666,8 @@ async def test_query_relationship_multiple_values(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1677,6 +1677,7 @@ async def test_query_relationship_multiple_values(
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestPerson"]["edges"]) == 2
     assert result.data["TestPerson"]["edges"][0]["node"]["cars"]["edges"][0]["node"]["name"]["value"] == "volt"
     assert result.data["TestPerson"]["edges"][1]["node"]["cars"]["edges"][0]["node"]["name"]["value"] == "nolt"
@@ -1713,9 +1714,8 @@ async def test_query_oneway_relationship(db: InfrahubDatabase, default_branch: B
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1725,6 +1725,7 @@ async def test_query_oneway_relationship(db: InfrahubDatabase, default_branch: B
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data["TestPerson"]["edges"][0]["node"]["tags"]["edges"]) == 2
@@ -1754,9 +1755,8 @@ async def test_query_at_specific_time(db: InfrahubDatabase, default_branch: Bran
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1766,6 +1766,7 @@ async def test_query_at_specific_time(db: InfrahubDatabase, default_branch: Bran
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data[InfrahubKind.TAG]["edges"]) == 2
     names = sorted([tag["node"]["name"]["value"] for tag in result.data[InfrahubKind.TAG]["edges"]])
     assert names == ["Blue", "Green"]
@@ -1784,9 +1785,8 @@ async def test_query_at_specific_time(db: InfrahubDatabase, default_branch: Bran
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, at=time1, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, at=time1, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1796,6 +1796,7 @@ async def test_query_at_specific_time(db: InfrahubDatabase, default_branch: Bran
     )
 
     assert result.errors is None
+    assert result.data
     assert len(result.data[InfrahubKind.TAG]["edges"]) == 2
     names = sorted([tag["node"]["name"]["value"] for tag in result.data[InfrahubKind.TAG]["edges"]])
     assert names == ["Blue", "Red"]
@@ -1825,9 +1826,8 @@ async def test_query_attribute_updated_at(db: InfrahubDatabase, default_branch:
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1837,6 +1837,7 @@ async def test_query_attribute_updated_at(db: InfrahubDatabase, default_branch:
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["updated_at"]
     assert (
         result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["updated_at"]
@@ -1847,9 +1848,8 @@ async def test_query_attribute_updated_at(db: InfrahubDatabase, default_branch:
     p12.firstname.value = "Jim"
     await p12.save(db=db)
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result2 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1859,6 +1859,7 @@ async def test_query_attribute_updated_at(db: InfrahubDatabase, default_branch:
     )
 
     assert result2.errors is None
+    assert result2.data
     assert result2.data["TestPerson"]["edges"][0]["node"]["firstname"]["updated_at"]
     assert (
         result2.data["TestPerson"]["edges"][0]["node"]["firstname"]["updated_at"]
@@ -1883,9 +1884,8 @@ async def test_query_node_updated_at(db: InfrahubDatabase, default_branch: Branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1895,15 +1895,15 @@ async def test_query_node_updated_at(db: InfrahubDatabase, default_branch: Branc
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["TestPerson"]["edges"][0]["node"]["_updated_at"]
 
     p2 = await Node.init(db=db, schema="TestPerson")
     await p2.new(db=db, firstname="Jane", lastname="Doe")
     await p2.save(db=db)
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result2 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1913,6 +1913,7 @@ async def test_query_node_updated_at(db: InfrahubDatabase, default_branch: Branc
     )
 
     assert result2.errors is None
+    assert result2.data
     assert result2.data["TestPerson"]["edges"][0]["node"]["_updated_at"]
     assert result2.data["TestPerson"]["edges"][1]["node"]["_updated_at"]
     assert result2.data["TestPerson"]["edges"][1]["node"]["_updated_at"] == Timestamp(
@@ -1956,9 +1957,8 @@ async def test_query_relationship_updated_at(db: InfrahubDatabase, default_branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1968,15 +1968,15 @@ async def test_query_relationship_updated_at(db: InfrahubDatabase, default_branc
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["TestPerson"]["edges"] == []
 
     p1 = await Node.init(db=db, schema="TestPerson")
     await p1.new(db=db, firstname="John", lastname="Doe", tags=[t1, t2])
     await p1.save(db=db)
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result2 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1986,6 +1986,7 @@ async def test_query_relationship_updated_at(db: InfrahubDatabase, default_branc
     )
 
     assert result2.errors is None
+    assert result2.data
     assert len(result2.data["TestPerson"]["edges"][0]["node"]["tags"]["edges"]) == 2
     assert (
         result2.data["TestPerson"]["edges"][0]["node"]["tags"]["edges"][0]["node"]["_updated_at"]
@@ -2024,10 +2025,9 @@ async def test_query_attribute_node_property_source(
         }
     }
     """
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2037,6 +2037,7 @@ async def test_query_attribute_node_property_source(
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["source"]
     assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["source"]["id"] == first_account.id
     assert gql_params.context.related_node_ids == {p1.id, first_account.id}
@@ -2083,10 +2084,8 @@ async def test_query_attribute_node_property_owner(
         }
     }
     """
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2131,9 +2130,8 @@ async def test_query_attribute_node_property_owner(
     }
     """
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result2 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2219,9 +2217,8 @@ async def test_query_relationship_node_property(
     }
     """
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2230,7 +2227,7 @@ async def test_query_relationship_node_property(
         variable_values={},
     )
     assert result.errors is None
-
+    assert result.data
     results = {item["node"]["name"]["value"]: item["node"] for item in result.data["TestPerson"]["edges"]}
     assert sorted(results.keys()) == ["Jane", "John"]
     assert len(results["John"]["cars"]["edges"]) == 1
@@ -2277,10 +2274,8 @@ async def test_query_relationship_node_property(
         }
     }
     """
-
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2290,6 +2285,7 @@ async def test_query_relationship_node_property(
     )
 
     assert result.errors is None
+    assert result.data
     results = {item["node"]["name"]["value"]: item["node"] for item in result.data["TestCar"]["edges"]}
     assert set(results.keys()) == {"volt", "bolt"}
@@ -2358,9 +2354,8 @@ async def test_query_relationship_node_property(
    }
    """
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2369,6 +2364,7 @@ async def test_query_relationship_node_property(
         variable_values={},
     )
     assert result.errors is None
+    assert result.data
     owner_results = {
         item["node"]["name"]["value"]: item["node"] for item in result.data["people_with_cars_and_owners"]["edges"]
@@ -2456,9 +2452,8 @@ async def test_same_many_relationship_with_different_limits_offsets(
     john_cars_by_uuid = sorted([car_accord_main, car_prius_main], key=lambda c: c.id)
     jane_cars_by_uuid = sorted([car_camry_main, car_yaris_main], key=lambda c: c.id)
 
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2467,6 +2462,7 @@ async def test_same_many_relationship_with_different_limits_offsets(
         variable_values={},
     )
     assert result.errors is None
+    assert result.data
 
     for person_node in result.data["people_with_cars_1"]["edges"]:
         person_name = person_node["node"]["name"]["value"]
@@ -2519,10 +2515,8 @@ async def test_query_attribute_flag_property(
         }
     }
     """
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2548,9 +2542,8 @@ async def test_query_branches(db: InfrahubDatabase, default_branch: Branch, regi
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2560,6 +2553,7 @@ async def test_query_branches(db: InfrahubDatabase, default_branch: Branch, regi
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["Branch"][0]["name"] == "main"
@@ -2582,9 +2576,8 @@ async def test_query_multiple_branches(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2594,6 +2587,7 @@ async def test_query_multiple_branches(
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["branch1"][0]["name"] == "main"
     assert result1.data["branch2"][0]["name"] == "main"
@@ -2632,9 +2626,8 @@ async def test_multiple_queries(db: InfrahubDatabase, default_branch: Branch, pe
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2644,6 +2637,7 @@ async def test_multiple_queries(db: InfrahubDatabase, default_branch: Branch, pe
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["firstperson"]["edges"][0]["node"]["firstname"]["value"] == "John"
     assert result1.data["secondperson"]["edges"][0]["node"]["firstname"]["value"] == "Jane"
     assert gql_params.context.related_node_ids == {p1.id, p2.id}
@@ -2743,9 +2737,7 @@ async def test_model_rel_interface(db: InfrahubDatabase, default_branch: Branch,
     }
     """
     default_branch.update_schema_hash()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2806,9 +2798,7 @@ async def test_model_rel_interface_reverse(db: InfrahubDatabase, default_branch:
     }
     """
     default_branch.update_schema_hash()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2840,10 +2830,8 @@ async def test_generic_root_with_pagination(
         }
     }
     """
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -2942,10 +2930,8 @@ async def test_member_of_groups(
         }
     }
     """
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3012,9 +2998,8 @@ async def test_hierarchical_location_parent_filter(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3022,6 +3007,7 @@ async def test_hierarchical_location_parent_filter(
         root_value=None,
         variable_values={},
     )
+    assert result.data
 
     nodes = [node["node"]["name"]["value"] for node in result.data["LocationRack"]["edges"]]
@@ -3064,9 +3050,8 @@ async def test_hierarchical_location_ancestors(
         }
     }
     """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3076,6 +3061,7 @@ async def test_hierarchical_location_ancestors(
     )
 
     assert result.errors is None
+    assert result.data
     rack = result.data["LocationRack"]["edges"][0]["node"]
     ancestors = rack["ancestors"]["edges"]
     descendants = rack["descendants"]["edges"]
@@ -3120,9 +3106,8 @@ async def test_hierarchical_location_descendants(
        }
    }
    """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3132,6 +3117,7 @@ async def test_hierarchical_location_descendants(
     )
 
     assert result.errors is None
+    assert result.data
     asia = result.data["LocationRegion"]["edges"][0]["node"]
     ancestors = asia["ancestors"]["edges"]
     descendants = asia["descendants"]["edges"]
@@ -3175,9 +3161,8 @@ async def test_hierarchical_location_descendants_filters_attr(
        }
    }
    """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3187,6 +3172,7 @@ async def test_hierarchical_location_descendants_filters_attr(
     )
 
     assert result.errors is None
+    assert result.data
     asia = result.data["LocationRegion"]["edges"][0]["node"]
     descendants = asia["descendants"]["edges"]
     descendants_names = [node["node"]["name"]["value"] for node in descendants]
@@ -3228,9 +3214,8 @@ async def test_hierarchical_location_descendants_filters_ids(
         hierarchical_location_data["beijing-r1"].id,
         hierarchical_location_data["singapore-r2"].id,
     )
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3240,6 +3225,7 @@ async def test_hierarchical_location_descendants_filters_ids(
     )
 
     assert result.errors is None
+    assert result.data
     asia = result.data["LocationRegion"]["edges"][0]["node"]
     descendants = asia["descendants"]["edges"]
     descendants_names = [node["node"]["name"]["value"] for node in descendants]
@@ -3278,9 +3264,8 @@ async def test_hierarchical_location_include_descendants(
        }
    }
    """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3290,6 +3275,7 @@ async def test_hierarchical_location_include_descendants(
     )
 
     assert result.errors is None
+    assert result.data
     asia = result.data["LocationRegion"]["edges"][0]["node"]
     things = asia["things"]["edges"]
     things_names = [node["node"]["name"]["value"] for node in things]
@@ -3398,9 +3384,8 @@ async def test_properties_on_different_query_paths(
        }
    }
    """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3410,6 +3395,7 @@ async def test_properties_on_different_query_paths(
     )
 
     assert result.errors is None
+    assert result.data
 
     # check owners are correct
     for rack in result.data["LocationRack"]["edges"]:
@@ -3458,9 +3444,8 @@ async def test_hierarchical_groups_descendants(
        }
    }
    """
-    gql_params = await prepare_graphql_params(
-        db=db, include_mutation=False, include_subscription=False, branch=default_branch
-    )
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -3470,6 +3455,7 @@ async def test_hierarchical_groups_descendants(
     )
 
     assert result.errors is None
+    assert result.data
     grp1 = result.data["CoreStandardGroup"]["edges"][0]["node"]
     members = grp1["members"]["edges"]
     members_ids = [node["node"]["id"] for node in members]
diff --git a/backend/tests/unit/graphql/test_graphql_utils.py b/backend/tests/unit/graphql/test_graphql_utils.py
index b41fe5b9a7..19ac31c426 100644
--- a/backend/tests/unit/graphql/test_graphql_utils.py
+++ b/backend/tests/unit/graphql/test_graphql_utils.py
@@ -27,9 +27,12 @@ def generate_graphql_schema(
     )
 
 
-async def test_schema_models(db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics, query_01: str):
+async def test_schema_models(
+    db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics: None, query_01: str
+) -> None:
     document = parse(query_01)
-    schema = generate_graphql_schema(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    schema = generate_graphql_schema(db=db, branch=default_branch)
     fields = await extract_fields(document.definitions[0].selection_set)
 
     expected_response = {
@@ -49,7 +52,8 @@ async def test_schema_models_generics(
     db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics, query_02: str
 ):
     document = parse(query_02)
-    schema = generate_graphql_schema(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    schema = generate_graphql_schema(db=db, branch=default_branch)
     fields = await extract_fields(document.definitions[0].selection_set)
 
     expected_response = {
diff --git a/backend/tests/unit/graphql/test_mutation_artifact_definition.py b/backend/tests/unit/graphql/test_mutation_artifact_definition.py
index 57fcda1352..e7cd2f24ef 100644
--- a/backend/tests/unit/graphql/test_mutation_artifact_definition.py
+++ b/backend/tests/unit/graphql/test_mutation_artifact_definition.py
@@ -175,9 +175,7 @@ async def test_update_artifact_definition(
         authenticated=True, account_id=create_test_admin.id, session_id=None, auth_type=AuthType.API
     )
     branch.update_schema_hash()
-    gql_params = await prepare_graphql_params(
-        db=db, include_subscription=False, branch=branch, service=service, account_session=account_session
-    )
+    gql_params = await prepare_graphql_params(db=db, branch=branch, service=service, account_session=account_session)
 
     with patch(
         "infrahub.services.adapters.workflow.local.WorkflowLocalExecution.submit_workflow"
     ) as mock_submit_workflow:
diff --git a/backend/tests/unit/graphql/test_mutation_create.py b/backend/tests/unit/graphql/test_mutation_create.py
index 2a805dac54..3111081cc3 100644
--- a/backend/tests/unit/graphql/test_mutation_create.py
+++ b/backend/tests/unit/graphql/test_mutation_create.py
@@ -15,13 +15,12 @@
 from infrahub.core.schema.schema_branch import SchemaBranch
 from infrahub.database import InfrahubDatabase
 from infrahub.graphql.initialization import prepare_graphql_params
-from infrahub.graphql.registry import registry as graphql_registry
 from tests.constants import TestKind
 from tests.helpers.graphql import graphql
 from tests.helpers.schema import DEVICE_SCHEMA
 
 
-async def test_create_simple_object(db: InfrahubDatabase, default_branch, car_person_schema):
+async def test_create_simple_object(db: InfrahubDatabase, default_branch: Branch, car_person_schema: None) -> None:
     query = """
     mutation {
         TestPersonCreate(data: {name: { value: "John"}, height: {value: 182}}) {
             ok
             object {
                 id
             }
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -42,6 +42,7 @@ async def test_create_simple_object(db: InfrahubDatabase, default_branch, car_pe
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPersonCreate"]["ok"] is True
 
     person_id = result.data["TestPersonCreate"]["object"]["id"]
@@ -52,7 +53,9 @@ async def test_create_simple_object(db: InfrahubDatabase, default_branch, car_pe
     assert person.height.is_default is False
 
 
-async def test_create_simple_object_with_ok_return(db: InfrahubDatabase, default_branch, car_person_schema):
+async def test_create_simple_object_with_ok_return(
+    db: InfrahubDatabase, default_branch: Branch, car_person_schema: None
+) -> None:
     query = """
     mutation {
         TestPersonCreate(data: {name: { value: "John"}, height: {value: 182}}) {
             ok
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -69,6 +73,7 @@ async def test_create_simple_object_with_ok_return(db: InfrahubDatabase, default
         variable_values={},
     )
     assert result.errors is None
+    assert result.data
     assert result.data["TestPersonCreate"]["ok"] is True
@@ -87,7 +92,8 @@ async def test_create_with_id(db: InfrahubDatabase, default_branch, car_person_s
         """
         % uuid1
     )
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -97,6 +103,7 @@ async def test_create_with_id(db: InfrahubDatabase, default_branch, car_person_s
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPersonCreate"]["ok"] is True
     assert result.data["TestPersonCreate"]["object"]["id"] == uuid1
@@ -110,7 +117,8 @@ async def test_create_with_id(db: InfrahubDatabase, default_branch, car_person_s
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -139,7 +147,8 @@ async def test_create_check_unique(db: InfrahubDatabase, default_branch, car_per
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -171,7 +180,8 @@ async def test_create_check_unique_across_branch(db: InfrahubDatabase, default_b
 
     branch1 = await create_branch(branch_name="branch1", db=db)
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch1)
+    branch1.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch1)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -202,7 +212,8 @@ async def test_create_check_unique_in_branch(db: InfrahubDatabase, default_branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch1)
+    branch1.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch1)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -233,7 +244,8 @@ async def test_attr_optional_uniqueness_constraint_create(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -244,7 +256,7 @@ async def test_attr_optional_uniqueness_constraint_create(
 
     assert result.errors is None
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -252,6 +264,7 @@ async def test_attr_optional_uniqueness_constraint_create(
         root_value=None,
         variable_values={},
     )
+    assert result.errors
     assert len(result.errors) == 1
     assert result.errors[0].message == "Violates uniqueness constraint 'name-description'"
@@ -277,7 +290,8 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch, all_attribut
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -287,6 +301,7 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch, all_attribut
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestAllAttributeTypesCreate"]["ok"] is True
     assert len(result.data["TestAllAttributeTypesCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -326,7 +341,8 @@ async def test_all_attributes_default_value(db: InfrahubDatabase, default_branch
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -336,6 +352,7 @@ async def test_all_attributes_default_value(db: InfrahubDatabase, default_branch
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestAllAttributeTypesCreate"]["ok"] is True
     obj_id = result.data["TestAllAttributeTypesCreate"]["object"]["id"]
     assert len(obj_id) == 36  # length of an UUID
@@ -386,7 +403,8 @@ async def test_create_object_with_flag_property(db: InfrahubDatabase, default_br
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -396,6 +414,7 @@ async def test_create_object_with_flag_property(db: InfrahubDatabase, default_br
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPersonCreate"]["ok"] is True
     assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -418,7 +437,8 @@ async def test_create_object_with_flag_property(db: InfrahubDatabase, default_br
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -428,6 +448,7 @@ async def test_create_object_with_flag_property(db: InfrahubDatabase, default_br
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["TestPerson"]["edges"][0]["node"]["name"]["is_protected"] is True
     assert result1.data["TestPerson"]["edges"][0]["node"]["height"]["is_visible"] is False
@@ -454,7 +475,8 @@ async def test_create_object_with_node_property(
         second_account.id,
     )
 
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -464,6 +486,7 @@ async def test_create_object_with_node_property(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestPersonCreate"]["ok"] is True
     assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -493,7 +516,8 @@ async def test_create_object_with_node_property(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result1 = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -503,6 +527,7 @@ async def test_create_object_with_node_property(
     )
 
     assert result1.errors is None
+    assert result1.data
     assert result1.data["TestPerson"]["edges"][0]["node"]["name"]["source"]["id"] == first_account.id
     assert result1.data["TestPerson"]["edges"][0]["node"]["name"]["source"][
         "display_label"
@@ -535,7 +560,8 @@ async def test_create_object_with_single_relationship(db: InfrahubDatabase, defa
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -545,6 +571,7 @@ async def test_create_object_with_single_relationship(db: InfrahubDatabase, defa
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestCarCreate"]["ok"] is True
     assert len(result.data["TestCarCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -567,7 +594,8 @@ async def test_create_object_with_invalid_single_relationship_fails(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -575,6 +603,7 @@ async def test_create_object_with_invalid_single_relationship_fails(
         root_value=None,
         variable_values={},
     )
+    assert result.errors
     assert len(result.errors) == 1
     gql_error = result.errors[0]
     assert "Unable to find the node pretend region / LocationRegion in the database." in gql_error.message
@@ -602,7 +631,8 @@ async def test_create_object_with_single_relationship_flag_property(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -612,6 +642,7 @@ async def test_create_object_with_single_relationship_flag_property(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestCarCreate"]["ok"] is True
     assert len(result.data["TestCarCreate"]["object"]["id"]) == 36
@@ -647,7 +678,8 @@ async def test_create_object_with_single_relationship_node_property(
         """
         % first_account.id
     )
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -657,6 +689,7 @@ async def test_create_object_with_single_relationship_node_property(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["TestCarCreate"]["ok"] is True
     assert len(result.data["TestCarCreate"]["object"]["id"]) == 36
@@ -693,7 +726,8 @@ async def test_create_object_with_multiple_relationships(db: InfrahubDatabase, d
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -703,6 +737,7 @@ async def test_create_object_with_multiple_relationships(db: InfrahubDatabase, d
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["GardenFruitCreate"]["ok"] is True
     assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -747,7 +782,8 @@ async def test_create_object_with_multiple_relationships_with_node_property(
         first_account.id,
         second_account.id,
     )
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -757,6 +793,7 @@ async def test_create_object_with_multiple_relationships_with_node_property(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["GardenFruitCreate"]["ok"] is True
     assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -817,7 +854,8 @@ async def test_create_object_with_multiple_relationships_flag_property(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -827,6 +865,7 @@ async def test_create_object_with_multiple_relationships_flag_property(
     )
 
     assert result.errors is None
+    assert result.data
     assert result.data["GardenFruitCreate"]["ok"] is True
     assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36  # length of an UUID
@@ -883,7 +922,8 @@ async def test_create_relationship_for_node_with_migrated_kind(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=group_create_query,
@@ -896,7 +936,8 @@ async def test_create_relationship_for_node_with_migrated_kind(
     main_group_id = result.data["CoreStandardGroupCreate"]["object"]["id"]
 
     # create group on branch
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     result = await graphql(
         schema=gql_params.schema,
         source=group_create_query,
@@ -924,7 +965,8 @@ async def test_create_relationship_for_node_with_migrated_kind(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=group_members_query,
@@ -937,7 +979,8 @@ async def test_create_relationship_for_node_with_migrated_kind(
     assert result.data["CoreStandardGroup"]["edges"][0]["node"]["members"]["count"] == 1
 
     # check relationship count on branch
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     result = await graphql(
         schema=gql_params.schema,
         source=group_members_query,
@@ -1032,7 +1075,8 @@ async def test_create_person_not_valid(db: InfrahubDatabase, default_branch, car
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1041,6 +1085,7 @@ async def test_create_person_not_valid(db: InfrahubDatabase, default_branch, car
         variable_values={},
     )
 
+    assert result.errors
     assert len(result.errors) == 1
     assert result.errors[0].message == "Expected value of type 'BigInt', found \"182\"."
@@ -1066,7 +1111,8 @@ async def test_create_with_attribute_not_valid(db: InfrahubDatabase, default_bra
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1075,6 +1121,7 @@ async def test_create_with_attribute_not_valid(db: InfrahubDatabase, default_bra
         variable_values={},
     )
 
+    assert result.errors
     assert len(result.errors) == 1
     assert "#44444444 must have a maximum length of 7 at color" in result.errors[0].message
@@ -1108,7 +1155,8 @@ async def test_create_with_uniqueness_constraint_violation(db: InfrahubDatabase,
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1116,6 +1164,7 @@ async def test_create_with_uniqueness_constraint_violation(db: InfrahubDatabase,
         root_value=None,
         variable_values={},
     )
+    assert result.errors
     assert len(result.errors) == 1
     assert "Violates uniqueness constraint 'owner-color'" in result.errors[0].message
@@ -1142,7 +1191,8 @@ async def test_relationship_with_hfid(db: InfrahubDatabase, default_branch, anim
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1151,6 +1201,7 @@ async def test_relationship_with_hfid(db: InfrahubDatabase, default_branch, anim
         variable_values={},
     )
     assert result.errors is None
+    assert result.data
     assert result.data["TestDogCreate"]["ok"] is True
     assert result.data["TestDogCreate"]["object"]["id"]
@@ -1185,7 +1236,8 @@ async def test_incorrect_peer_type_prevented(db: InfrahubDatabase, default_branc
         }
     }
     """ % {"animal_id": person2.id}
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1241,7 +1293,8 @@ async def test_create_valid_datetime_success(db: InfrahubDatabase, default_branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1250,6 +1303,7 @@ async def test_create_valid_datetime_success(db: InfrahubDatabase, default_branc
         variable_values={},
    )
     assert result.errors is None
+    assert result.data
     assert result.data["TestCriticalityCreate"]["ok"] is True
     crit = await NodeManager.get_one(db=db, id=result.data["TestCriticalityCreate"]["object"]["id"])
     assert crit.time.value == "2021-01-01T00:00:00Z"
@@ -1266,7 +1320,8 @@ async def test_create_valid_datetime_failure(db: InfrahubDatabase, default_branc
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1274,6 +1329,8 @@ async def test_create_valid_datetime_failure(db: InfrahubDatabase, default_branc
         root_value=None,
         variable_values={},
     )
+    assert result.data
+    assert result.errors
     assert result.errors[0].args[0] == "10:1010 is not a valid DateTime at time"
     assert result.data["TestCriticalityCreate"] is None
@@ -1582,6 +1639,7 @@ async def test_create_simple_object_with_enum(
     graphql_enums_on,
     enum_value,
     response_value,
+    reset_graphql_schema_between_tests,
 ):
     config.SETTINGS.experimental_features.graphql_enums = graphql_enums_on
     query = """
@@ -1603,8 +1661,8 @@ async def test_create_simple_object_with_enum(
         }
     }
     """ % (enum_value)
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1649,7 +1707,8 @@ async def test_create_enum_when_enums_off_fails(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -1658,6 +1717,7 @@ async def test_create_enum_when_enums_off_fails(
         variable_values={},
     )
 
+    assert result.errors
     assert len(result.errors) == 1
     assert "String cannot represent a non string value" in result.errors[0].message
@@ -1667,6 +1727,7 @@ async def test_create_string_when_enums_on_fails(
     default_branch,
     person_john_main,
     car_person_schema,
+    reset_graphql_schema_between_tests,
 ):
     config.SETTINGS.experimental_features.graphql_enums = True
     query = """
@@ -1688,8 +1749,8 @@ async def test_create_string_when_enums_on_fails(
         }
     }
     """
-    graphql_registry.clear_cache()
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+    default_branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=default_branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
diff --git a/backend/tests/unit/graphql/test_mutation_create_jinja2_attributes.py b/backend/tests/unit/graphql/test_mutation_create_jinja2_attributes.py
index 7c0d31cc93..02e0e7239a 100644
--- a/backend/tests/unit/graphql/test_mutation_create_jinja2_attributes.py
+++ b/backend/tests/unit/graphql/test_mutation_create_jinja2_attributes.py
@@ -45,7 +45,8 @@ async def test_create_with_jinja2_computed_attributes_on_related_node(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -94,7 +95,8 @@ async def test_create_with_jinja2_computed_attributes_on_hierarchial_node(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     result = await graphql(
         schema=gql_params.schema,
         source=query,
@@ -134,7 +136,8 @@ async def test_create_with_jinja2_with_generics(
         }
     }
     """
-    gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch)
+    branch.update_schema_hash()
+    gql_params = await prepare_graphql_params(db=db, branch=branch)
     continent_result = await graphql(
         schema=gql_params.schema,
source=continent_query, @@ -166,7 +169,8 @@ async def test_create_with_jinja2_with_generics( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) country_result = await graphql( schema=gql_params.schema, source=country_query, @@ -201,7 +205,8 @@ async def test_create_with_jinja2_with_generics( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) site_result = await graphql( schema=gql_params.schema, source=site_query, diff --git a/backend/tests/unit/graphql/test_mutation_delete.py b/backend/tests/unit/graphql/test_mutation_delete.py index a26b59d116..47b7f23ff8 100644 --- a/backend/tests/unit/graphql/test_mutation_delete.py +++ b/backend/tests/unit/graphql/test_mutation_delete.py @@ -35,7 +35,8 @@ async def test_delete_object(db: InfrahubDatabase, default_branch, car_person_sc """ % obj1.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -64,7 +65,8 @@ async def test_delete_prevented( """ % person_jane_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -110,7 +112,8 @@ async def test_delete_allowed_when_peer_rel_optional_on_generic( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -149,7 +152,8 @@ async def test_delete_prevented_when_peer_rel_required_on_generic( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -187,8 +191,9 @@ async def test_delete_events_with_cascade( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) query = """ mutation DeletePerson($human_id: String!){ diff --git a/backend/tests/unit/graphql/test_mutation_generator.py b/backend/tests/unit/graphql/test_mutation_generator.py index eced6db474..e7f0655732 100644 --- a/backend/tests/unit/graphql/test_mutation_generator.py +++ b/backend/tests/unit/graphql/test_mutation_generator.py @@ -65,7 +65,7 @@ async def test_run_generator_definition( ) default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=account_session + db=db, branch=default_branch, service=service, 
account_session=account_session ) with patch( diff --git a/backend/tests/unit/graphql/test_mutation_graphqlquery.py b/backend/tests/unit/graphql/test_mutation_graphqlquery.py index 1c80c730a8..b9e7df817b 100644 --- a/backend/tests/unit/graphql/test_mutation_graphqlquery.py +++ b/backend/tests/unit/graphql/test_mutation_graphqlquery.py @@ -112,7 +112,7 @@ async def test_create_query_with_vars(db: InfrahubDatabase, default_branch: Bran """ % query_value.replace("\n", " ").replace('"', '\\"') default_branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -209,7 +209,7 @@ async def test_update_query(db: InfrahubDatabase, default_branch: Branch, regist query_update.replace("\n", " ").replace('"', '\\"'), ) default_branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -272,7 +272,7 @@ async def test_update_query_no_update(db: InfrahubDatabase, default_branch: Bran } """ % (obj.id) default_branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, diff --git a/backend/tests/unit/graphql/test_mutation_relationship.py b/backend/tests/unit/graphql/test_mutation_relationship.py index a814f2d478..ff5ff94331 100644 --- a/backend/tests/unit/graphql/test_mutation_relationship.py +++ b/backend/tests/unit/graphql/test_mutation_relationship.py @@ -83,8 +83,9 @@ async def test_relationship_add( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch, service=service, account_session=session_first_account + db=db, branch=branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -140,8 +141,9 @@ async def test_relationship_add( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch, service=service, account_session=session_first_account + db=db, branch=branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -203,7 +205,8 @@ async def test_relationship_remove( tag_black_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -241,7 +244,8 @@ async def test_relationship_remove( tag_red_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -281,7 +285,8 @@ async def test_relationship_wrong_name( tag_blue_main.id, ) - gql_params 
= await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -309,7 +314,8 @@ async def test_relationship_wrong_name( tag_blue_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -347,7 +353,8 @@ async def test_relationship_wrong_node( bad_uuid, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -375,7 +382,8 @@ async def test_relationship_wrong_node( person_jack_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -456,7 +464,7 @@ async def test_relationship_groups_add( service = await InfrahubServices.new(event=memory_event) default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -503,8 +511,9 @@ async def test_relationship_groups_add( ) memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -606,7 +615,7 @@ async def test_relationship_groups_remove( service = await InfrahubServices.new(event=memory_event) default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -649,8 +658,9 @@ async def test_relationship_groups_remove( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( @@ -712,7 +722,7 @@ async def test_relationship_groups_add_remove(db: InfrahubDatabase, default_bran g2.id, ) default_branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -747,7 +757,8 @@ async def test_relationship_groups_add_remove(db: 
InfrahubDatabase, default_bran g2.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -786,7 +797,8 @@ async def test_relationship_groups_add_remove(db: InfrahubDatabase, default_bran g2.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -821,7 +833,8 @@ async def test_relationship_groups_add_remove(db: InfrahubDatabase, default_bran g2.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -863,7 +876,7 @@ async def test_relationship_add_busy(db: InfrahubDatabase, default_branch: Branc ) default_branch.update_schema_hash() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -932,7 +945,8 @@ async def test_relationship_add_for_node_with_migrated_kind( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=add_members_query, @@ -943,7 +957,8 @@ async def test_relationship_add_for_node_with_migrated_kind( assert not result.errors # add person to group on branch - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=add_members_query, @@ -969,7 +984,8 @@ async def test_relationship_add_for_node_with_migrated_kind( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=group_members_query, @@ -982,7 +998,8 @@ async def test_relationship_add_for_node_with_migrated_kind( assert result.data["CoreStandardGroup"]["edges"][0]["node"]["members"]["count"] == 1 # check relationship count on branch - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=group_members_query, @@ -1090,7 +1107,8 @@ async def test_relationship_add_from_pool( } """ % (hugh.id, "ip_prefixes", prefix_pool_01["prefix_pool"].id) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, context_value=gql_params.context, 
root_value=None, variable_values={} ) @@ -1143,7 +1161,8 @@ async def test_add_generic_related_node_with_hfid( } """ % (person.id) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, @@ -1220,9 +1239,7 @@ async def test_with_permissions( """ default_branch.update_schema_hash() - gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, account_session=first_session - ) + gql_params = await prepare_graphql_params(db=db, branch=default_branch, account_session=first_session) result = await graphql( schema=gql_params.schema, source=query % (person_jack_main.id, tag_blue_main.id), @@ -1326,9 +1343,8 @@ async def test_relationship_read_only( await device1.new(db=db, name="device1", location=site1) await device1.save(db=db) - gql_params = await prepare_graphql_params( - db=db, include_mutation=True, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) add_query = """ mutation RelationshipAdd( diff --git a/backend/tests/unit/graphql/test_mutation_update.py b/backend/tests/unit/graphql/test_mutation_update.py index 6f3d49fd0b..a9fa1604ab 100644 --- a/backend/tests/unit/graphql/test_mutation_update.py +++ b/backend/tests/unit/graphql/test_mutation_update.py @@ -39,7 +39,8 @@ async def test_update_simple_object(db: InfrahubDatabase, person_john_main: Node """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -49,6 +50,7 @@ async def test_update_simple_object(db: InfrahubDatabase, person_john_main: Node ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True obj1 = await NodeManager.get_one(db=db, id=person_john_main.id, branch=branch) @@ -67,7 +69,8 @@ async def test_update_simple_object_with_ok_return(db: InfrahubDatabase, person_ """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -77,6 +80,7 @@ async def test_update_simple_object_with_ok_return(db: InfrahubDatabase, person_ ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True obj1 = await NodeManager.get_one(db=db, id=person_john_main.id, branch=branch) @@ -114,7 +118,8 @@ async def test_update_simple_object_with_enum( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -122,6 +127,7 @@ async def test_update_simple_object_with_enum( root_value=None, variable_values={}, ) + assert result.data car_id = result.data["TestCarCreate"]["object"]["id"] query = """ @@ -140,7 +146,8 @@ async def test_update_simple_object_with_enum( } } """ % {"car_id": car_id, "enum_value": enum_value} - gql_params = await 
prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -150,6 +157,7 @@ async def test_update_simple_object_with_enum( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["transmission"]["value"] == response_value @@ -174,7 +182,8 @@ async def test_update_check_unique(db: InfrahubDatabase, person_john_main: Node, """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -208,7 +217,8 @@ async def test_update_object_with_flag_property(db: InfrahubDatabase, person_joh """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -218,6 +228,7 @@ async def test_update_object_with_flag_property(db: InfrahubDatabase, person_joh ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True obj1 = await NodeManager.get_one(db=db, id=person_john_main.id, branch=branch) @@ -271,8 +282,9 @@ async def test_update_all_attributes( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + default_branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=default_branch, service=service, account_session=session_first_account + db=db, branch=default_branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -342,7 +354,8 @@ async def test_update_object_with_node_property( second_account.id, second_account.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -352,6 +365,7 @@ async def test_update_object_with_node_property( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True obj1 = await NodeManager.get_one(db=db, id=person_john_with_source_main.id, include_source=True, branch=branch) @@ -373,7 +387,8 @@ async def test_update_invalid_object(db: InfrahubDatabase, default_branch: Branc } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -382,6 +397,7 @@ async def test_update_invalid_object(db: InfrahubDatabase, default_branch: Branc variable_values={}, ) + assert result.errors assert len(result.errors) == 1 assert "Unable to find the node XXXXXX / TestPerson in the database." 
in result.errors[0].message @@ -403,7 +419,8 @@ async def test_update_invalid_input(db: InfrahubDatabase, person_john_main: Node """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -412,6 +429,7 @@ async def test_update_invalid_input(db: InfrahubDatabase, person_john_main: Node variable_values={}, ) + assert result.errors assert len(result.errors) == 1 assert "String cannot represent a non string value" in result.errors[0].message @@ -447,8 +465,9 @@ async def test_update_single_relationship( ) memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch, service=service, account_session=session_first_account + db=db, branch=branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -510,7 +529,8 @@ async def test_update_default_value( } } """ % (car_accord_main.id) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -520,6 +540,7 @@ async def test_update_default_value( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["color"]["is_default"] is False @@ -543,7 +564,8 @@ async def test_update_default_value( } """ % (car_accord_main.id) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -553,6 +575,7 @@ async def test_update_default_value( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["color"]["is_default"] is False @@ -580,7 +603,8 @@ async def test_update_default_value( } """ % (car_accord_main.id) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -590,6 +614,7 @@ async def test_update_default_value( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["color"]["is_default"] is True assert result.data["TestCarUpdate"]["object"]["transmission"]["value"] is None @@ -626,7 +651,8 @@ async def test_update_new_single_relationship_flag_property( car_accord_main.id, person_jim_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -636,6 +662,7 @@ async def test_update_new_single_relationship_flag_property( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert 
result.data["TestCarUpdate"]["object"]["owner"]["node"]["name"]["value"] == "Jim" @@ -669,7 +696,8 @@ async def test_update_delete_optional_relationship_cardinality_one( car_accord_main.id, person_jim_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -679,6 +707,7 @@ async def test_update_delete_optional_relationship_cardinality_one( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["owner"]["node"]["name"]["value"] == "Jim" @@ -705,7 +734,8 @@ async def test_update_delete_optional_relationship_cardinality_one( } } """ % (car_accord_main.id,) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -715,6 +745,7 @@ async def test_update_delete_optional_relationship_cardinality_one( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["owner"]["node"] is None car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) @@ -745,7 +776,8 @@ async def test_update_existing_single_relationship_flag_property( car_accord_main.id, person_john_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -755,6 +787,7 @@ async def test_update_existing_single_relationship_flag_property( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["owner"]["node"]["name"]["value"] == "John" @@ -811,7 +844,8 @@ async def test_update_existing_single_relationship_node_property( person_john_main.id, second_account.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -821,6 +855,7 @@ async def test_update_existing_single_relationship_node_property( ) assert result.errors is None + assert result.data assert result.data["TestCarUpdate"]["ok"] is True assert result.data["TestCarUpdate"]["object"]["owner"]["node"]["name"]["value"] == "John" @@ -863,7 +898,8 @@ async def test_update_relationship_many( person_jack_main.id, tag_blue_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -873,6 +909,7 @@ async def test_update_relationship_many( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]["edges"]) == 1 @@ -903,7 +940,8 @@ async def test_update_relationship_many( tag_red_main.id, tag_black_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, 
branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -913,6 +951,7 @@ async def test_update_relationship_many( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]["edges"]) == 2 @@ -945,7 +984,8 @@ async def test_update_relationship_many( tag_blue_main.id, tag_black_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -955,6 +995,7 @@ async def test_update_relationship_many( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]["edges"]) == 2 @@ -994,7 +1035,8 @@ async def test_update_relationship_many2( person_jack_main.id, tag_blue_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1004,6 +1046,7 @@ async def test_update_relationship_many2( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]["edges"]) == 1 @@ -1034,7 +1077,8 @@ async def test_update_relationship_many2( tag_red_main.id, tag_black_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1044,6 +1088,7 @@ async def test_update_relationship_many2( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]["edges"]) == 2 @@ -1080,7 +1125,8 @@ async def test_update_relationship_previously_deleted( person_jack_main.id, tag_blue_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1090,6 +1136,7 @@ async def test_update_relationship_previously_deleted( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]) == 1 @@ -1116,7 +1163,8 @@ async def test_update_relationship_previously_deleted( tag_red_main.id, tag_black_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1126,6 +1174,7 @@ async def test_update_relationship_previously_deleted( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]) == 2 @@ -1153,7 +1202,8 @@ async def test_update_relationship_previously_deleted( tag_blue_main.id, 
tag_black_main.id, ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1163,6 +1213,7 @@ async def test_update_relationship_previously_deleted( ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] is True assert len(result.data["TestPersonUpdate"]["object"]["tags"]) == 2 @@ -1227,7 +1278,8 @@ async def test_update_for_node_with_migrated_kind( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=update_group_query, @@ -1238,7 +1290,8 @@ async def test_update_for_node_with_migrated_kind( assert not result.errors # add person to group on branch - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=update_group_query, @@ -1264,7 +1317,8 @@ async def test_update_for_node_with_migrated_kind( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=group_members_query, @@ -1277,7 +1331,8 @@ async def test_update_for_node_with_migrated_kind( assert result.data["CoreStandardGroup"]["edges"][0]["node"]["members"]["count"] == 1 # check relationship count on branch - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=group_members_query, @@ -1389,7 +1444,8 @@ async def test_update_with_uniqueness_constraint_violation(db: InfrahubDatabase, % c2.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1397,6 +1453,7 @@ async def test_update_with_uniqueness_constraint_violation(db: InfrahubDatabase, root_value=None, variable_values={}, ) + assert result.errors assert len(result.errors) == 1 assert "Violates uniqueness constraint 'owner-color'" in result.errors[0].message @@ -1429,7 +1486,8 @@ async def test_with_hfid(db: InfrahubDatabase, default_branch, animal_person_sch } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1438,6 +1496,7 @@ async def test_with_hfid(db: InfrahubDatabase, default_branch, animal_person_sch variable_values={}, ) assert result.errors is None + assert result.data assert result.data["TestDogUpdate"]["ok"] is True assert result.data["TestDogUpdate"]["object"] == {"color": {"value": "black"}, "id": dog1.id} @@ -1472,7 +1531,8 @@ async def 
test_incorrect_peer_type_prevented(db: InfrahubDatabase, default_branc } } """ % {"person_id": person1.id, "animal_id": person2.id} - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1544,7 +1604,8 @@ async def test_removing_mandatory_relationship_not_allowed(db: InfrahubDatabase, } } """ % {"animal_id": dog1.id} - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1590,7 +1651,8 @@ async def test_updating_relationship_when_peer_side_is_required( } } """ % {"person_id": person1.id, "animal1_id": dog1.id, "animal2_id": dog2.id} - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1636,7 +1698,8 @@ async def test_updating_relationship_when_peer_side_is_optional( } } """ % {"person_id": person1.id, "animal1_id": dog1.id, "animal2_id": dog2.id} - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -1645,6 +1708,7 @@ async def test_updating_relationship_when_peer_side_is_optional( variable_values={}, ) assert result.errors is None + assert result.data assert result.data["TestPersonUpdate"]["ok"] updated_nodes = await NodeManager.get_many(db=db, ids=[person1.id, person2.id, dog1.id, dog2.id]) diff --git a/backend/tests/unit/graphql/test_mutation_upsert.py b/backend/tests/unit/graphql/test_mutation_upsert.py index a7f395d743..dfbf807b75 100644 --- a/backend/tests/unit/graphql/test_mutation_upsert.py +++ b/backend/tests/unit/graphql/test_mutation_upsert.py @@ -27,7 +27,8 @@ async def test_upsert_existing_simple_object_by_id(db: InfrahubDatabase, person_ """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -68,7 +69,8 @@ async def test_upsert_existing_simple_object_by_default_filter( } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -106,8 +108,9 @@ async def test_upsert_event_on_no_change( """ memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch, service=service, account_session=session_first_account + db=db, branch=branch, service=service, account_session=session_first_account ) result = await graphql( schema=gql_params.schema, @@ -135,8 +138,9 @@ async def 
test_upsert_event_on_no_change( memory_event = MemoryInfrahubEvent() service = await InfrahubServices.new(event=memory_event) + branch.update_schema_hash() gql_params = await prepare_graphql_params( - db=db, include_subscription=False, branch=branch, service=service, account_session=session_first_account + db=db, branch=branch, service=service, account_session=session_first_account ) result_second_time = await graphql( schema=gql_params.schema, @@ -166,7 +170,8 @@ async def test_upsert_create_simple_object_no_id(db: InfrahubDatabase, person_jo } """ % ("Ellen Ripley", 179) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -198,7 +203,8 @@ async def test_id_for_other_schema_raises_error( """ % car_accord_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -225,7 +231,8 @@ async def test_update_by_id_to_nonunique_value_raises_error( """ % person_john_main.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -251,7 +258,8 @@ async def test_non_unique_value_raises_error(db: InfrahubDatabase, person_schema } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -259,6 +267,7 @@ async def test_non_unique_value_raises_error(db: InfrahubDatabase, person_schema root_value=None, variable_values={}, ) + assert result.errors assert len(result.errors) == 1 assert "Violates uniqueness constraint 'bag'" in result.errors[0].message @@ -302,7 +311,8 @@ async def test_upsert_existing_with_enough_information_for_hfid( } } """ % {"id1": thing1.id} - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -335,7 +345,8 @@ async def test_upsert_existing_with_enough_information_for_hfid( } } """ % {"id1": thing1.id, "id2": thing2.id} - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -349,7 +360,8 @@ async def test_upsert_existing_with_enough_information_for_hfid( # delete the TestThing.car relationship and try again await thing2.car.update(db=db, data=[None]) await thing2.save(db=db) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -358,6 +370,7 @@ async def test_upsert_existing_with_enough_information_for_hfid( 
variable_values={"car_name": car_name, "owner_id": fred.id, "color": car_color_2}, ) assert not result.errors + assert result.data assert result.data["TestCarUpsert"]["object"]["id"] == car.id assert result.data["TestCarUpsert"]["object"]["color"]["value"] == car_color_2 assert result.data["TestCarUpsert"]["object"]["owner"]["node"]["id"] == fred.id @@ -386,8 +399,8 @@ async def test_upsert_existing_hfid_with_non_hfid_unique_attr( } } """ - - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, @@ -432,7 +445,8 @@ async def test_with_hfid_existing(db: InfrahubDatabase, default_branch, animal_p """ % person1.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -487,7 +501,8 @@ async def test_with_hfid_new(db: InfrahubDatabase, default_branch, animal_person % person1.id ) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, @@ -544,7 +559,8 @@ async def test_with_constructed_hfid(db: InfrahubDatabase, default_branch, anima } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) # Create initial node initial_weight = 14 @@ -622,7 +638,8 @@ async def test_with_constructed_hfid_with_numbers( } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) update_result = await graphql( schema=gql_params.schema, @@ -657,7 +674,8 @@ async def test_upsert_node_on_branch_with_hfid_on_default(db: InfrahubDatabase, } } """ - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=branch) + branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=branch) result = await graphql( schema=gql_params.schema, source=query, diff --git a/backend/tests/unit/graphql/test_parser.py b/backend/tests/unit/graphql/test_parser.py index a5fb5a3778..13786b9cf9 100644 --- a/backend/tests/unit/graphql/test_parser.py +++ b/backend/tests/unit/graphql/test_parser.py @@ -5,7 +5,7 @@ from tests.helpers.graphql import graphql -async def test_simple_directive(db: InfrahubDatabase, default_branch: Branch, criticality_schema): +async def test_simple_directive(db: InfrahubDatabase, default_branch: Branch, criticality_schema) -> None: obj1 = await Node.init(db=db, schema=criticality_schema) await obj1.new(db=db, name="low", level=4) await obj1.save(db=db) @@ -32,9 +32,8 @@ async def test_simple_directive(db: InfrahubDatabase, default_branch: Branch, cr } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await 
graphql( schema=gql_params.schema, source=query, @@ -68,7 +67,7 @@ async def test_simple_directive(db: InfrahubDatabase, default_branch: Branch, cr } in result.data["TestCriticality"]["edges"] -async def test_directive_exclude(db: InfrahubDatabase, default_branch: Branch, criticality_schema): +async def test_directive_exclude(db: InfrahubDatabase, default_branch: Branch, criticality_schema) -> None: obj1 = await Node.init(db=db, schema=criticality_schema) await obj1.new(db=db, name="low", level=4) await obj1.save(db=db) @@ -89,9 +88,8 @@ async def test_directive_exclude(db: InfrahubDatabase, default_branch: Branch, c } """ - gql_params = await prepare_graphql_params( - db=db, include_mutation=False, include_subscription=False, branch=default_branch - ) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) result = await graphql( schema=gql_params.schema, source=query, diff --git a/backend/tests/unit/graphql/test_query.py b/backend/tests/unit/graphql/test_query.py index 0828fc38f8..7c5fc986bf 100644 --- a/backend/tests/unit/graphql/test_query.py +++ b/backend/tests/unit/graphql/test_query.py @@ -41,7 +41,9 @@ async def execute_query( raise ValueError(f"Unable to find the {InfrahubKind.GRAPHQLQUERY} {name}") gql_params = await prepare_graphql_params( - branch=branch, db=db, at=at, include_mutation=False, include_subscription=False + branch=branch, + db=db, + at=at, ) result = await graphql( @@ -55,7 +57,7 @@ async def execute_query( return result -async def test_execute_query(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema): +async def test_execute_query(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema) -> None: t1 = await Node.init(db=db, schema=InfrahubKind.TAG, branch=default_branch) await t1.new(db=db, name="Blue", description="The Blue tag") await t1.save(db=db) @@ -68,13 +70,15 @@ async def test_execute_query(db: InfrahubDatabase, default_branch: Branch, regis await q1.new(db=db, name="query01", query="query { BuiltinTag { count }}") await q1.save(db=db) + default_branch.update_schema_hash() result = await execute_query(name="query01", db=db, branch=default_branch) assert result.errors is None assert result.data == {"BuiltinTag": {"count": 2}} -async def test_execute_missing_query(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema): +async def test_execute_missing_query(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema) -> None: + default_branch.update_schema_hash() with pytest.raises(ValueError) as exc: await execute_query(name="query02", db=db, branch=default_branch) diff --git a/backend/tests/unit/graphql/test_query_analyzer.py b/backend/tests/unit/graphql/test_query_analyzer.py index d297f79e71..01a61f997b 100644 --- a/backend/tests/unit/graphql/test_query_analyzer.py +++ b/backend/tests/unit/graphql/test_query_analyzer.py @@ -7,7 +7,6 @@ from infrahub.database import InfrahubDatabase from infrahub.graphql.analyzer import GraphQLArgument, GraphQLVariable, InfrahubGraphQLQueryAnalyzer, MutateAction from infrahub.graphql.initialization import prepare_graphql_params -from infrahub.graphql.registry import registry as graphql_registry from tests.helpers.schema.color import COLOR from tests.helpers.schema.tshirt import TSHIRT @@ -16,7 +15,8 @@ async def test_analyzer_init_with_schema( db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics, query_01: str, bad_query_01: str ): schema_branch = 
registry.schema.get_schema_branch(name=default_branch.name) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) gqa = InfrahubGraphQLQueryAnalyzer( query=query_01, schema=gql_params.schema, branch=default_branch, schema_branch=schema_branch ) @@ -35,8 +35,8 @@ async def test_is_valid_simple_schema( car_person_schema_generics, ): schema_branch = registry.schema.get_schema_branch(name=default_branch.name) - graphql_registry.clear_cache() - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) gqa = InfrahubGraphQLQueryAnalyzer( query=query_01, schema=gql_params.schema, branch=default_branch, schema_branch=schema_branch ) @@ -80,7 +80,8 @@ async def test_is_valid_core_schema( register_core_models_schema, ): schema_branch = registry.schema.get_schema_branch(name=default_branch.name) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) gqa = InfrahubGraphQLQueryAnalyzer( query=query_05, schema=gql_params.schema, branch=default_branch, schema_branch=schema_branch @@ -99,7 +100,8 @@ async def test_get_models_in_use( car_person_schema_generics, ): schema_branch = registry.schema.get_schema_branch(name=default_branch.name) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) gqa = InfrahubGraphQLQueryAnalyzer( query=query_01, schema=gql_params.schema, branch=default_branch, schema_branch=schema_branch ) @@ -183,7 +185,8 @@ async def test_get_models_in_use( async def test_query_report(db: InfrahubDatabase, default_branch: Branch, car_person_schema_generics): schema_branch = registry.schema.get_schema_branch(name=default_branch.name) - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) mutation_query_no_return_data = """ mutation { @@ -409,7 +412,8 @@ async def test_query_report_single_target( schema_branch = registry.schema.get_schema_branch(name=default_branch.name) - gql_params = await prepare_graphql_params(db=db, branch=default_branch, include_subscription=False) + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) query_name_variable_required = """ query TshirtQuery($name: String!) 
{ diff --git a/backend/tests/unit/graphql/test_schema.py b/backend/tests/unit/graphql/test_schema.py index e51dcb4d2d..772fd958d5 100644 --- a/backend/tests/unit/graphql/test_schema.py +++ b/backend/tests/unit/graphql/test_schema.py @@ -1,11 +1,15 @@ from graphql.type.definition import GraphQLList, GraphQLNonNull, GraphQLObjectType +from infrahub.core.branch import Branch from infrahub.database import InfrahubDatabase from infrahub.graphql.initialization import prepare_graphql_params -async def test_schema_is_nonnull(db: InfrahubDatabase, default_branch, car_person_schema): - gql_params = await prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) +async def test_schema_is_nonnull(db: InfrahubDatabase, default_branch: Branch, car_person_schema: None) -> None: + default_branch.update_schema_hash() + gql_params = await prepare_graphql_params(db=db, branch=default_branch) + + assert gql_params.schema.query_type for name, field in gql_params.schema.query_type.fields.items(): # ------------------------------------------------------------ diff --git a/changelog/+artifact.fixed.md b/changelog/+artifact.fixed.md new file mode 100644 index 0000000000..baefaa8cbb --- /dev/null +++ b/changelog/+artifact.fixed.md @@ -0,0 +1 @@ +The artifact count has been removed from the Proposed Changes list view. diff --git a/changelog/+diff-summary.fixed.md b/changelog/+diff-summary.fixed.md new file mode 100644 index 0000000000..830ccccf8a --- /dev/null +++ b/changelog/+diff-summary.fixed.md @@ -0,0 +1 @@ +Fixed incorrect data diff counter when viewing a branch or proposed changes diff --git a/changelog/+profile-schema-update.added.md b/changelog/+profile-schema-update.added.md new file mode 100644 index 0000000000..2c7739ae2b --- /dev/null +++ b/changelog/+profile-schema-update.added.md @@ -0,0 +1 @@ +Add support for updating existing Profiles when the associated node or generic schema is updated to change an attribute's optional or read-only value or when an attribute is added or removed \ No newline at end of file diff --git a/changelog/7407.fixed.md b/changelog/7407.fixed.md new file mode 100644 index 0000000000..5ba7373deb --- /dev/null +++ b/changelog/7407.fixed.md @@ -0,0 +1 @@ +Fix a bug that could cause duplicated attributes to be created when updating a generic schema with a new attribute. Includes a migration to fix any existing duplicated attributes created by this bug. \ No newline at end of file diff --git a/changelog/7431.added.md b/changelog/7431.added.md new file mode 100644 index 0000000000..b907bc7cd0 --- /dev/null +++ b/changelog/7431.added.md @@ -0,0 +1,2 @@ +- Schema Visualizer now displays `on_delete` settings for relationships +- Fixed display of common_parent settings in relationships. diff --git a/docs/docs/guides/object-conversion.mdx b/docs/docs/guides/object-conversion.mdx index 9112970f66..3b3f52a24d 100644 --- a/docs/docs/guides/object-conversion.mdx +++ b/docs/docs/guides/object-conversion.mdx @@ -1,28 +1,233 @@ --- -title: Converting an Object Type +title: How to convert object types --- -It may happen that you created an object with a type A, and would like to convert it to a similar type B. Infrahub offers a way to convert an object type, -without having to manually deleting and recreating it. +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; -## How to convert an object? +# How to convert object types -Currently, mutation `ConvertObjectType` is the only way to convert an object type. 
It will soon be possible to do it from both the web application and the SDK. -Converting an object requires that any mandatory field present in the destination schema type to be specified. Note that equivalent fields, if not specified, are automatically filled in: +This guide shows how to convert an existing object from one type to another without having to manually delete and recreate it. This is useful when you need to change an object's schema type while preserving its data and relationships. -- Two attribute fields in two distinct schemas are considered to be equivalent if they have both the same name and value type. -- Two relationship fields in two distinct schemas are considered to be equivalent if they have the same name, peer schema type, and cardinality. +## Introduction -# Case of agnostic object having branch aware attributes +Object type conversion allows you to transform an object from one schema type to another compatible type. For example, you might need to convert a Layer 3 interface to a Layer 2 interface, or change a read-only repository to a read-write repository. -For technical reasons, converting an agnostic object having branch aware attribute would remove values of these attributes on the non-conversion branches. -Therefore, converting such an object has some current limitations: +The conversion process automatically maps equivalent fields between the source and destination types, and allows you to specify values for any mandatory fields that don't have equivalent mappings. -- Conversion is only allowed on the default branch (usually `main`). -- Doing such a conversion would automatically put other branches in a `NEED_REBASE` state. No write-operation can be performed on a branch with this state until it is rebased. -- Branch aware attributes values on other branches are lost (these attributes no longer exist on those branches). +## Prerequisites -# Converting a repository +Before starting, ensure you have: -It is possible to convert a read-only repository to a read-write repository and vice versa. As they are both agnostic objects having branch aware attributes (`commit`), -they follow above restrictions on converting agnostic objects having branch aware attributes. +- An existing object that you want to convert +- Access to Infrahub through the web interface, GraphQL API, or Python SDK +- Understanding of both the source and destination schema types +- Values ready for any mandatory fields in the destination type that don't exist in the source type + +## Step 1: Identify equivalent fields + +Infrahub automatically identifies equivalent fields between source and destination types based on these criteria: + +- **Attribute fields** are equivalent if they have the same name and value type +- **Relationship fields** are equivalent if they have the same name, peer schema type, and cardinality + +Fields that match these criteria will be automatically mapped during conversion. + +## Step 2: Convert the object type + +<Tabs> +<TabItem value="web" label="Web interface"> + +Navigate to the object you want to convert and select **Convert object type** from the actions dropdown menu. + +![Convert Object Button](../media/object_convert_button.png) + +Infrahub displays a mapping interface showing how fields from the source type will map to the destination type. The interface automatically maps equivalent fields and highlights any mandatory fields that require values.
+ +![Convert Object](../media/object_convert_mapping.png) + +Review the field mappings and provide values for any mandatory fields that don't have automatic mappings. Click **Convert** to complete the process. + + + + + +First, query the field mapping between source and destination types to understand what values you need to provide: + +```graphql +query GetMapping { + FieldsMappingTypeConversion( + source_kind: "InfraInterfaceL3" + target_kind: "InfraInterfaceL2" + ) { + mapping + } +} +``` + +The response shows the mapping between source and destination fields, indicating which fields are mandatory and which have automatic mappings: + +```json +{ + "data": { + "FieldsMappingTypeConversion": { + "mapping": { + "lacp_priority": { + "is_mandatory": false, + "source_field_name": "lacp_priority", + "relationship_cardinality": null + }, + "l2_mode": { + "is_mandatory": true, + "source_field_name": null, + "relationship_cardinality": null + }, + "name": { + "is_mandatory": true, + "source_field_name": "name", + "relationship_cardinality": null + }, + "device": { + "is_mandatory": true, + "source_field_name": "device", + "relationship_cardinality": "one" + } + // Additional fields omitted for brevity + } + } + } +} +``` + +Use this mapping information to build the conversion mutation, providing values for mandatory fields that don't have source mappings: + +```graphql +mutation ConvertType { + ConvertObjectType( + data: { + node_id: "18703746-7b3b-442e-3027-10657106d6f9" + target_kind: "InfraInterfaceL2" + fields_mapping: { + device: { + source_field: "device" + }, + l2_mode: { + data: { + attribute_value: "Access" + } + }, + name: { + source_field: "name" + }, + connected_endpoint: { + source_field: "connected_endpoint" + }, + lag: { + use_default_value: true + }, + untagged_vlan: { + use_default_value: true + }, + tagged_vlan: { + use_default_value: true + } + // Additional field mappings as needed + } + } + ) { + node + __typename + } +} +``` + + + + +Use the Python SDK to convert an object type by first retrieving the object and then calling the conversion method: + +```python +from infrahub_sdk import InfrahubClientSync +from infrahub_sdk.convert_object_type import ConversionFieldInput, ConversionFieldValue + +client = InfrahubClientSync(address="http://localhost:8000") + +# Get the object to convert +interface = client.get( + kind="InfraInterfaceL3", + name__value="Ethernet1", + device__name__value="ord1-edge2" +) + +# Define field mappings for the conversion +fields_mapping = { + "device": ConversionFieldInput(source_field="device"), + "l2_mode": ConversionFieldInput( + data=ConversionFieldValue(attribute_value="Access") + ), + "name": ConversionFieldInput(source_field="name"), + "connected_endpoint": ConversionFieldInput(source_field="connected_endpoint"), + "speed": ConversionFieldInput(source_field="speed"), + "lag": ConversionFieldInput(use_default_value=True), + "untagged_vlan": ConversionFieldInput(use_default_value=True), + "tagged_vlan": ConversionFieldInput(use_default_value=True), + # Add additional mappings as needed +} + +# Perform the conversion +new_interface = client.convert_object_type( + node_id=interface.id, + target_kind="InfraInterfaceL2", + branch=client.default_branch, + fields_mapping=fields_mapping, +) + +print(f"Conversion successful. New interface kind: {new_interface.get_kind()}") +``` + + + + +## Step 3: Verify the conversion + +After converting the object, verify that the conversion was successful: + +1. 
**Check the object type**: Confirm that the object now shows the correct destination type
2. **Verify field values**: Ensure that all data was transferred correctly and new mandatory fields have appropriate values
3. **Test relationships**: Confirm that relationships to other objects are still intact and functioning properly
+
+:::success
+If the conversion completed without errors, your object has been successfully converted to the new type with all compatible data preserved.
+:::
+
+## Advanced usage
+
+### Converting objects with branch-aware attributes
+
+Objects that have branch-aware attributes require special consideration during conversion:
+
+:::warning
+Converting objects with branch-aware attributes has important limitations that affect other branches in your repository.
+:::
+
+**Limitations:**
+
+- Conversion is only allowed on the default branch (usually `main`)
+- Other branches will automatically enter a `NEED_REBASE` state after conversion
+- Branch-aware attribute values on non-conversion branches will be lost
+- No write operations can be performed on affected branches until they are rebased
+
+### Handling mandatory fields without source mappings
+
+When the destination type has mandatory fields that don't exist in the source type, you must provide values:
+
+- **Use explicit values**: Provide specific attribute values or relationship targets
+- **Use default values**: Allow Infrahub to use schema-defined defaults where available
+- **Use computed values**: Let relationship fields use computed defaults when appropriate
+
+## Related resources
+
+- [Object conversion topic](../topics/object-conversion) - Understanding the concepts behind object conversion
+- [Schema management guide](./create-schema) - Creating and modifying schema types
+
diff --git a/docs/docs/media/object_convert_button.png b/docs/docs/media/object_convert_button.png new file mode 100644 index 0000000000..a07910aadf Binary files /dev/null and b/docs/docs/media/object_convert_button.png differ diff --git a/docs/docs/media/object_convert_mapping.png b/docs/docs/media/object_convert_mapping.png new file mode 100644 index 0000000000..e11a79e5e9 Binary files /dev/null and b/docs/docs/media/object_convert_mapping.png differ diff --git a/docs/docs/reference/schema/validator-migration.mdx b/docs/docs/reference/schema/validator-migration.mdx index ff02f33c50..3f49730e64 100644 --- a/docs/docs/reference/schema/validator-migration.mdx +++ b/docs/docs/reference/schema/validator-migration.mdx @@ -63,9 +63,9 @@ In this context, an element represent either a Node, a Generic, an Attribute or | **min_length** | validate_constraint | | **label** | allowed | | **description** | allowed | -| **read_only** | allowed | +| **read_only** | migration_required | | **unique** | validate_constraint | -| **optional** | validate_constraint | +| **optional** | migration_required | | **branch** | not_supported | | **order_weight** | allowed | | **default_value** | allowed | diff --git a/docs/docs/topics/object-conversion.mdx b/docs/docs/topics/object-conversion.mdx new file mode 100644 index 0000000000..cff2cbb7c4 --- /dev/null +++ b/docs/docs/topics/object-conversion.mdx @@ -0,0 +1,106 @@ +--- +title: Object conversion +--- + +# Object conversion + +Object conversion in Infrahub provides a powerful mechanism to transform existing objects from one schema type to another without losing data or breaking relationships.
This capability addresses the common infrastructure management challenge of evolving data models while preserving existing configurations and connections. + +## Introduction + +In dynamic infrastructure environments, the need to change an object's type often arises as requirements evolve. Traditional approaches typically require deleting the original object and manually recreating it with the new type, leading to data loss and broken relationships. Infrahub's object conversion feature eliminates this friction by providing a seamless transformation process. + +## How object conversion works + +### Field mapping and equivalence + +Object conversion operates on the principle of field equivalence between source and destination schema types. The system automatically identifies compatible fields using specific criteria: + +**Attribute field equivalence:** + +- Fields must have identical names +- Fields must have compatible value types +- Data validation rules are preserved during conversion + +**Relationship field equivalence:** + +- Fields must have identical names +- Fields must reference the same peer schema type +- Cardinality constraints (one-to-one, one-to-many) must match + +### Automatic mapping process + +When initiating a conversion, Infrahub performs these steps: + +1. **Schema analysis**: Compares source and destination schema definitions to identify equivalent fields +2. **Compatibility check**: Validates that the conversion is technically feasible +3. **Mandatory field identification**: Highlights destination fields that require explicit values +4. **Data transformation**: Executes the conversion while preserving data integrity + +### Handling non-equivalent fields + +Not all fields between schema types will have direct equivalents. The conversion process handles these scenarios: + +**Missing source fields:** + +- Destination fields without source equivalents must be manually specified +- Mandatory destination fields require explicit values or use of defaults +- Optional fields can leverage schema-defined default values + +**Surplus source fields:** + +- Source fields without destination equivalents are gracefully ignored +- Data from these fields is not transferred but doesn't impede conversion +- Historical data remains accessible through version control + +## Design considerations and constraints + +### Version control integration + +Object conversion integrates deeply with Infrahub's Git-like branching model, but this creates important constraints: + +**Branch-aware attributes:** + +Objects containing branch-aware attributes (fields that can have different values across branches) require special handling. These conversions: + +- Must occur on the default branch to maintain data consistency +- Trigger automatic rebase requirements for other branches +- May result in data loss on non-conversion branches + +**Commit preservation:** +The conversion process maintains object history and commit lineage, ensuring auditability and rollback capabilities. 
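Since a conversion involving branch-aware attributes happens on the default branch and leaves every other branch in a `NEED_REBASE` state, a follow-up rebase is usually needed before those branches accept writes again. Below is a minimal sketch using the Python SDK, assuming the branch manager exposes `all()`, `is_default`, and `rebase()` as shown; verify the names against your SDK version.

```python
from infrahub_sdk import InfrahubClientSync

client = InfrahubClientSync(address="http://localhost:8000")

# After a conversion on the default branch, rebase every other branch so it
# leaves the NEED_REBASE state and can accept write operations again.
# `all()`, `is_default`, and `rebase()` are assumed SDK APIs (check your version).
for name, branch in client.branch.all().items():
    if branch.is_default:
        continue
    client.branch.rebase(branch_name=name)
```
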
+ +### Data integrity safeguards + +Several mechanisms protect data integrity during conversion: + +**Validation enforcement:** + +- All destination schema constraints are validated before conversion +- Referential integrity for relationships is maintained +- Data type compatibility is strictly enforced + +**Atomic operations:** + +- Conversions execute as atomic transactions +- Failures result in complete rollback with no partial state +- Concurrent modifications are handled through Infrahub's conflict resolution + +## Common use cases + +### Infrastructure evolution + +**Network interface type changes:** +Converting between Layer 2 and Layer 3 interfaces as network designs evolve, preserving device associations and configuration parameters while adapting to new operational requirements. + +**Repository access model changes:** +Transforming read-only repositories to read-write repositories when teams gain modification privileges, maintaining Git integration while expanding access controls. + +## Related concepts + +Object conversion intersects with several other Infrahub concepts: + +- **[Schema management](./schema)**: Understanding how schema types define conversion possibilities +- **[Version control](./version-control)**: How conversions interact with branch and merge operations +- **[Metadata](./metadata)**: How object metadata is preserved during conversion +- **[GraphQL](./graphql)**: The API mechanisms that enable conversion operations \ No newline at end of file diff --git a/docs/sidebars.ts b/docs/sidebars.ts index aafdfb48fc..5d7195f73f 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -247,6 +247,7 @@ const sidebars: SidebarsConfig = { 'topics/metadata', 'topics/groups', 'topics/graphql', + 'topics/object-conversion', 'topics/resource-manager', 'topics/object-template', 'topics/profiles', diff --git a/frontend/app/src/entities/diff/api/get-diff-tree-summary-from-api.ts b/frontend/app/src/entities/diff/api/get-diff-tree-summary-from-api.ts new file mode 100644 index 0000000000..abe66ad7d0 --- /dev/null +++ b/frontend/app/src/entities/diff/api/get-diff-tree-summary-from-api.ts @@ -0,0 +1,30 @@ +import { gql } from "@apollo/client"; + +import type { + Get_Diff_Tree_SummaryQuery, + Get_Diff_Tree_SummaryQueryVariables, +} from "@/shared/api/graphql/generated/graphql"; +import graphqlClient from "@/shared/api/graphql/graphqlClientApollo"; + +export const GET_PROPOSED_CHANGES_DIFF_SUMMARY = gql` + query GET_DIFF_TREE_SUMMARY($branch: String, $filters: DiffTreeQueryFilters) { + DiffTreeSummary(branch: $branch, filters: $filters) { + num_added + num_updated + num_removed + num_conflicts + } + } +`; + +export interface GetDiffTreeSummaryFromApiParams extends Get_Diff_Tree_SummaryQueryVariables {} + +export function getDiffTreeSummaryFromApi(variables: Get_Diff_Tree_SummaryQueryVariables) { + return graphqlClient.query({ + query: GET_PROPOSED_CHANGES_DIFF_SUMMARY, + variables, + context: { + processErrorMessage: () => {}, + }, + }); +} diff --git a/frontend/app/src/entities/diff/domain/get-diff-summary.query.ts b/frontend/app/src/entities/diff/domain/get-diff-summary.query.ts new file mode 100644 index 0000000000..0163fda309 --- /dev/null +++ b/frontend/app/src/entities/diff/domain/get-diff-summary.query.ts @@ -0,0 +1,18 @@ +import { queryOptions, useQuery } from "@tanstack/react-query"; + +import type { QueryConfig } from "@/shared/api/types"; + +import { type GetDiffSummaryParams, getDiffSummary } from "@/entities/diff/domain/get-diff-summary"; + +export function 
getDiffSummaryQueryOptions({ branch, filters }: GetDiffSummaryParams) { + return queryOptions({ + queryKey: ["diff-summary", branch, filters], + queryFn: () => getDiffSummary({ branch, filters }), + }); +} + +export type UseGetDiffSummaryConfig = QueryConfig; + +export function useGetDiffSummary(params: GetDiffSummaryParams, config?: UseGetDiffSummaryConfig) { + return useQuery({ ...getDiffSummaryQueryOptions(params), ...config }); +} diff --git a/frontend/app/src/entities/diff/domain/get-diff-summary.ts b/frontend/app/src/entities/diff/domain/get-diff-summary.ts new file mode 100644 index 0000000000..6d59417e23 --- /dev/null +++ b/frontend/app/src/entities/diff/domain/get-diff-summary.ts @@ -0,0 +1,27 @@ +import { + type GetDiffTreeSummaryFromApiParams, + getDiffTreeSummaryFromApi, +} from "@/entities/diff/api/get-diff-tree-summary-from-api"; + +export type GetDiffSummaryParams = GetDiffTreeSummaryFromApiParams; + +export type GetDiffSummaryResponse = { + num_added: number; + num_updated: number; + num_removed: number; + num_conflicts: number; +}; + +export type GetDiffSummary = ( + params: GetDiffSummaryParams +) => Promise; + +export const getDiffSummary: GetDiffSummary = async (params) => { + const { data, errors } = await getDiffTreeSummaryFromApi(params); + + if (errors) { + throw new Error(errors.map((e) => e.message).join("; ")); + } + + return (data.DiffTreeSummary as GetDiffSummaryResponse) ?? null; +}; diff --git a/frontend/app/src/entities/diff/node-diff/index.tsx b/frontend/app/src/entities/diff/node-diff/index.tsx index 979929da09..2d8d8c95b8 100644 --- a/frontend/app/src/entities/diff/node-diff/index.tsx +++ b/frontend/app/src/entities/diff/node-diff/index.tsx @@ -9,7 +9,9 @@ import { DateDisplay } from "@/shared/components/display/date-display"; import ErrorScreen from "@/shared/components/errors/error-screen"; import { LoadingIndicator } from "@/shared/components/loading/loading-indicator"; +import type { GetDiffSummaryParams } from "@/entities/diff/domain/get-diff-summary"; import { useDiffTreeInfiniteQuery } from "@/entities/diff/domain/get-diff-tree"; +import { DiffNode } from "@/entities/diff/node-diff/node"; import { DIFF_STATUS, type DiffNode as DiffNodeType } from "@/entities/diff/node-diff/types"; import { buildFilters } from "@/entities/diff/node-diff/utils"; import { DiffComputing } from "@/entities/diff/ui/diff-computing"; @@ -19,29 +21,24 @@ import { DiffRebaseButton } from "@/entities/diff/ui/diff-rebase-button"; import { DiffRefreshButton } from "@/entities/diff/ui/diff-refresh-button"; import DiffTree from "@/entities/diff/ui/diff-tree"; import { proposedChangedState } from "@/entities/proposed-changes/stores/proposedChanges.atom"; - -import { type DiffFilter, ProposedChangeDiffFilter } from "../../proposed-changes/ui/diff-filter"; -import { DiffNode } from "./node"; +import { DiffFilter } from "@/entities/proposed-changes/ui/diff-filter"; export const DiffContext = createContext({}); -type NodeDiffProps = { - filters: DiffFilter; - branchName: string; -}; +type NodeDiffProps = GetDiffSummaryParams; -export const NodeDiff = ({ branchName, filters }: NodeDiffProps) => { +export const NodeDiff = ({ branch, filters }: NodeDiffProps) => { const [qspStatus] = useQueryState(QSP.STATUS); const proposedChangesDetails = useAtomValue(proposedChangedState); - const branch = proposedChangesDetails?.source_branch?.value || branchName; // Used in proposed changes view and branch view + const branchName: string = proposedChangesDetails?.source_branch?.value || branch; // 
Used in proposed changes view and branch view // Get filters merged with status filter const finalFilters = buildFilters(filters, qspStatus); const { data, isPending, error, hasNextPage, fetchNextPage, isFetchingNextPage } = useDiffTreeInfiniteQuery({ - branchName: branch, + branchName, filters: finalFilters, }); @@ -66,14 +63,14 @@ export const NodeDiff = ({ branchName, filters }: NodeDiffProps) => { if (!firstPageNodes) { return ( ); } if (!qspStatus && firstPageNodes.nodes?.length === 0) { - return ; + return ; } const nodes = @@ -89,12 +86,12 @@ export const NodeDiff = ({ branchName, filters }: NodeDiffProps) => { return (
- + Updated - - + +
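The refactored `NodeDiff` and `DiffFilter` components consume the counters returned by the `DiffTreeSummary` GraphQL field defined in `get-diff-tree-summary-from-api.ts`. As a rough illustration (not part of this change), the same counters can be fetched outside the frontend with the Python SDK; the `variables=` parameter of `execute_graphql` is assumed to be supported by your SDK version.

```python
from infrahub_sdk import InfrahubClientSync

# Same query shape as GET_DIFF_TREE_SUMMARY in the frontend change above.
QUERY = """
query GET_DIFF_TREE_SUMMARY($branch: String, $filters: DiffTreeQueryFilters) {
  DiffTreeSummary(branch: $branch, filters: $filters) {
    num_added
    num_updated
    num_removed
    num_conflicts
  }
}
"""

client = InfrahubClientSync(address="http://localhost:8000")
# Fetch the diff counters for a branch; "my-branch" is a placeholder.
response = client.execute_graphql(query=QUERY, variables={"branch": "my-branch"})
print(response["DiffTreeSummary"])
```
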
diff --git a/frontend/app/src/entities/diff/node-diff/utils.tsx b/frontend/app/src/entities/diff/node-diff/utils.tsx index 3ced9e71f6..386ecd7fa8 100644 --- a/frontend/app/src/entities/diff/node-diff/utils.tsx +++ b/frontend/app/src/entities/diff/node-diff/utils.tsx @@ -6,8 +6,6 @@ import { classNames, warnUnexpectedType } from "@/shared/utils/common"; import { capitalizeFirstLetter } from "@/shared/utils/string"; import { DIFF_STATUS, type DiffProperty, type DiffStatus } from "@/entities/diff/node-diff/types"; -import type { DiffFilter } from "@/entities/proposed-changes/ui/diff-filter"; - import { BadgeAdded, BadgeConflict, @@ -16,7 +14,8 @@ import { BadgeUnchanged, BadgeUpdated, type DiffBadgeProps, -} from "../ui/diff-badge"; +} from "@/entities/diff/ui/diff-badge"; +import type { DiffFilter } from "@/entities/proposed-changes/ui/diff-filter"; export const diffBadges: { [key: string]: BadgeType } = { ADDED: BadgeAdded, diff --git a/frontend/app/src/entities/diff/ui/diff-badge.tsx b/frontend/app/src/entities/diff/ui/diff-badge.tsx index 167a660663..db3b38e92f 100644 --- a/frontend/app/src/entities/diff/ui/diff-badge.tsx +++ b/frontend/app/src/entities/diff/ui/diff-badge.tsx @@ -8,10 +8,6 @@ export interface DiffBadgeProps extends BadgeProps { icon?: string; } -type CloseBadgeProps = { - className?: string; -}; - export type BadgeType = | typeof BadgeAdded | typeof BadgeRemoved @@ -53,10 +49,6 @@ export const BadgeAdded = ({ className, ...props }: DiffBadgeProps) => { ); }; -export const CloseBadgeAdded = () => { - return ; -}; - export const BadgeRemoved = ({ className, ...props }: DiffBadgeProps) => { return ( { ); }; -export const CloseBadgeRemoved = () => { - return ; -}; - export const BadgeConflict = ({ className, ...props }: DiffBadgeProps) => { return ( { ); }; -export const CloseBadgeConflict = () => { - return ; -}; - export const BadgeUpdated = ({ className, ...props }: DiffBadgeProps) => { return ( { /> ); }; - -export const CloseBadgeUpdated = () => { - return ; -}; - -const CloseBadge = ({ className }: CloseBadgeProps) => { - return ( -
- -
- ); -}; diff --git a/frontend/app/src/entities/proposed-changes/api/getProposedChangesDiffSummary.ts b/frontend/app/src/entities/proposed-changes/api/getProposedChangesDiffSummary.ts deleted file mode 100644 index 21ef69a0c8..0000000000 --- a/frontend/app/src/entities/proposed-changes/api/getProposedChangesDiffSummary.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from "@apollo/client"; - -export const getProposedChangesDiffSummary = gql` - query GET_PROPOSED_CHANGES_DIFF_SUMMARY($branch: String) { - DiffTreeSummary(branch: $branch) { - num_added - num_updated - num_removed - num_conflicts - } - } -`; diff --git a/frontend/app/src/entities/proposed-changes/ui/diff-filter.tsx b/frontend/app/src/entities/proposed-changes/ui/diff-filter.tsx index c0a51de413..b8a3237664 100644 --- a/frontend/app/src/entities/proposed-changes/ui/diff-filter.tsx +++ b/frontend/app/src/entities/proposed-changes/ui/diff-filter.tsx @@ -1,62 +1,36 @@ -import { useQuery } from "@apollo/client"; import { parseAsString, useQueryState } from "nuqs"; -import { toast } from "react-toastify"; import { QSP } from "@/config/qsp"; -import { Button, type ButtonProps } from "@/shared/components/buttons/button-primitive"; import ErrorScreen from "@/shared/components/errors/error-screen"; -import { ALERT_TYPES, Alert } from "@/shared/components/ui/alert"; -import { classNames } from "@/shared/utils/common"; -import { DIFF_STATUS } from "@/entities/diff/node-diff/types"; -import { DiffBadge } from "@/entities/diff/node-diff/utils"; +import type { GetDiffSummaryParams } from "@/entities/diff/domain/get-diff-summary"; +import { useGetDiffSummary } from "@/entities/diff/domain/get-diff-summary.query"; +import { DIFF_STATUS, type DiffStatus } from "@/entities/diff/node-diff/types"; +import { DiffSummarySkeleton } from "@/entities/proposed-changes/ui/diff-summary/diff-summary-skeleton"; import { - CloseBadgeAdded, - CloseBadgeConflict, - CloseBadgeRemoved, - CloseBadgeUpdated, -} from "@/entities/diff/ui/diff-badge"; -import { getProposedChangesDiffSummary } from "@/entities/proposed-changes/api/getProposedChangesDiffSummary"; + DiffSummaryTag, + DiffSummaryTagGroup, +} from "@/entities/proposed-changes/ui/diff-summary/diff-summary-tag-group"; -export type DiffFilter = { - namespace?: { - excludes?: string[]; - includes?: string[]; - }; - status?: { - excludes?: string[]; - includes?: string[]; - }; -}; - -type ProposedChangeDiffFilterProps = { - branch: string; - filters?: DiffFilter; -}; - -export const ProposedChangeDiffFilter = ({ branch, filters }: ProposedChangeDiffFilterProps) => { - const [qsp, setQsp] = useQueryState(QSP.STATUS, parseAsString.withOptions({ shallow: false })); +type DiffFilterProps = GetDiffSummaryParams; - const { error, data = {} } = useQuery(getProposedChangesDiffSummary, { - skip: !branch, - variables: { branch, filters }, - context: { - processErrorMessage: (message: string) => { - // If the branch is not found, then do not display alert - if (message.includes("not found")) return; +export function DiffFilter({ branch, filters }: DiffFilterProps) { + const [statusFilterQSP, setQsp] = useQueryState( + QSP.STATUS, + parseAsString.withOptions({ shallow: false }) + ); - toast(, { - toastId: "alert-error", - }); - }, - }, - }); + const { error, data, isPending } = useGetDiffSummary({ branch, filters }); - const handleFilter = (value: string) => { - setQsp(value === qsp ? null : value); + const handleFilter = (value: DiffStatus) => { + setQsp(value === statusFilterQSP ? 
null : value); }; + if (isPending) { + return ; + } + if (error) { return ( - + handleFilter(DIFF_STATUS.ADDED)} /> - handleFilter(DIFF_STATUS.REMOVED)} /> - handleFilter(DIFF_STATUS.UPDATED)} /> - handleFilter(DIFF_STATUS.CONFLICT)} /> -
+ ); -}; - -interface FilterButtonProps extends ButtonProps { - status: string; - count: number; - currentFilter: string | null | undefined; - onFilter: (value: string) => void; } - -const FilterButton = ({ status, count, currentFilter, onFilter, ...props }: FilterButtonProps) => { - const isMuted = !!currentFilter && currentFilter !== status; - const isDisabled = !count && currentFilter !== status; - - const CloseBadge = - status === DIFF_STATUS.ADDED - ? CloseBadgeAdded - : status === DIFF_STATUS.REMOVED - ? CloseBadgeRemoved - : status === DIFF_STATUS.UPDATED - ? CloseBadgeUpdated - : status === DIFF_STATUS.CONFLICT - ? CloseBadgeConflict - : null; - - return ( - - ); -}; diff --git a/frontend/app/src/entities/proposed-changes/ui/diff-summary.tsx b/frontend/app/src/entities/proposed-changes/ui/diff-summary.tsx deleted file mode 100644 index 47e1683c1d..0000000000 --- a/frontend/app/src/entities/proposed-changes/ui/diff-summary.tsx +++ /dev/null @@ -1,120 +0,0 @@ -import { useQuery } from "@apollo/client"; -import type React from "react"; -import { Link } from "react-router"; -import { toast } from "react-toastify"; - -import { QSP } from "@/config/qsp"; - -import { constructPath } from "@/shared/api/rest/fetch"; -import ErrorScreen from "@/shared/components/errors/error-screen"; -import { ALERT_TYPES, Alert } from "@/shared/components/ui/alert"; - -import { DiffBadge } from "@/entities/diff/node-diff/utils"; -import { getProposedChangesDiffSummary } from "@/entities/proposed-changes/api/getProposedChangesDiffSummary"; - -import { DIFF_STATUS, type DiffStatus } from "../../diff/node-diff/types"; - -interface DiffTreeSummary { - num_added: number; - num_removed: number; - num_updated: number; - num_conflicts: number; -} - -interface ProposedChangeDiffSummaryProps { - branchName: string; - proposedChangeId: string; -} - -const BadgeLink: React.FC<{ - status: DiffStatus; - count: number | undefined; - proposedChangeId: string; -}> = ({ status, count, proposedChangeId }) => { - const proposedChangeDetailsPath = `/proposed-changes/${proposedChangeId}`; - - return ( - - {count} - - ); -}; - -export const ProposedChangeDiffSummary: React.FC = ({ - proposedChangeId, - branchName, -}) => { - const { error, data, loading } = useQuery<{ DiffTreeSummary: DiffTreeSummary }>( - getProposedChangesDiffSummary, - { - skip: !branchName, - variables: { branch: branchName }, - context: { - processErrorMessage: (message: string) => { - if (!message.includes("not found")) { - toast(, { - toastId: "alert-error", - }); - } - }, - }, - } - ); - - if (loading) { - return ; - } - - if (error) { - return ( - - ); - } - - const { DiffTreeSummary } = data || {}; - - return ( -
- - - - -
- ); -}; - -const DiffSummarySkeleton: React.FC = () => { - return ( -
- {[...Array(4)].map((_, index) => ( -
- ))} -
- ); -}; diff --git a/frontend/app/src/entities/proposed-changes/ui/diff-summary/diff-summary-skeleton.tsx b/frontend/app/src/entities/proposed-changes/ui/diff-summary/diff-summary-skeleton.tsx new file mode 100644 index 0000000000..c6e41cc598 --- /dev/null +++ b/frontend/app/src/entities/proposed-changes/ui/diff-summary/diff-summary-skeleton.tsx @@ -0,0 +1,11 @@ +import { Row } from "@/shared/components/container"; + +export function DiffSummarySkeleton() { + return ( + + {[...Array(4)].map((_, index) => ( +
+ ))} + + ); +} diff --git a/frontend/app/src/entities/proposed-changes/ui/diff-summary/diff-summary-tag-group.tsx b/frontend/app/src/entities/proposed-changes/ui/diff-summary/diff-summary-tag-group.tsx new file mode 100644 index 0000000000..7596d8558e --- /dev/null +++ b/frontend/app/src/entities/proposed-changes/ui/diff-summary/diff-summary-tag-group.tsx @@ -0,0 +1,129 @@ +import { Icon } from "@iconify-icon/react"; +import { cva, type VariantProps } from "class-variance-authority"; +import { CircleMinusIcon, CirclePlusIcon, RefreshCwIcon, TriangleAlertIcon } from "lucide-react"; +import { + Tag, + TagGroup, + type TagGroupProps, + TagList, + type TagListProps, + type TagProps, +} from "react-aria-components"; + +import { disabledStyle, focusVisibleStyle } from "@/shared/components/style-rac"; +import { classNames } from "@/shared/utils/common"; + +export interface DiffSummaryProps + extends Omit, + Pick, "items" | "children" | "renderEmptyState"> {} + +export function DiffSummaryTagGroup({ + items, + children, + renderEmptyState, + ...props +}: DiffSummaryProps) { + return ( + + + + {children} + + + + ); +} + +const diffSummaryTagStyles = cva( + [ + disabledStyle, + focusVisibleStyle, + "relative inline-flex cursor-pointer items-center gap-1 rounded-full border border-transparent p-1 text-xs", + ], + { + variants: { + variant: { + added: "bg-green-200 text-green-800", + removed: "bg-red-200 text-red-800", + updated: "bg-blue-200 text-blue-800", + conflicts: "bg-yellow-200 text-yellow-800", + }, + isMuted: { + true: "opacity-50", + }, + }, + } +); + +export interface DiffSummaryTagProps extends VariantProps, TagProps { + count: number; + isClosable?: boolean; +} + +export function DiffSummaryTag({ + count, + variant, + className, + isClosable, + isMuted, + children, + ...props +}: DiffSummaryTagProps) { + return ( + + + {count} + {isClosable && } + + ); +} + +export function DiffSummaryIcon({ + variant, + ...props +}: Pick, "variant">) { + const className = "size-3"; + + switch (variant) { + case "added": + return ; + case "removed": + return ; + case "updated": + return ; + case "conflicts": + return ; + default: + return null; + } +} + +const diffSummaryCloseStyles = cva( + "-top-2 -right-2 absolute flex items-center justify-center rounded-full border-2 border-white", + { + variants: { + variant: { + added: "bg-green-200 text-green-800", + removed: "bg-red-200 text-red-800", + updated: "bg-blue-200 text-blue-800", + conflicts: "bg-yellow-200 text-yellow-800", + }, + }, + } +); + +export interface DiffSummaryCloseProps + extends React.HTMLProps, + VariantProps {} + +export function DiffSummaryClose({ className, variant, ...props }: DiffSummaryCloseProps) { + return ( +
+ +
+ ); +} diff --git a/frontend/app/src/entities/proposed-changes/ui/diff-summary/proposed-change-diff-summary.tsx b/frontend/app/src/entities/proposed-changes/ui/diff-summary/proposed-change-diff-summary.tsx new file mode 100644 index 0000000000..069ba0f320 --- /dev/null +++ b/frontend/app/src/entities/proposed-changes/ui/diff-summary/proposed-change-diff-summary.tsx @@ -0,0 +1,81 @@ +import { QSP } from "@/config/qsp"; + +import { constructPath } from "@/shared/api/rest/fetch"; +import ErrorScreen from "@/shared/components/errors/error-screen"; + +import { useGetDiffSummary } from "@/entities/diff/domain/get-diff-summary.query"; +import { DIFF_STATUS } from "@/entities/diff/node-diff/types"; +import { DiffSummarySkeleton } from "@/entities/proposed-changes/ui/diff-summary/diff-summary-skeleton"; +import { + DiffSummaryTag, + DiffSummaryTagGroup, +} from "@/entities/proposed-changes/ui/diff-summary/diff-summary-tag-group"; + +interface ProposedChangeDiffSummaryProps { + branchName: string; + proposedChangeId: string; +} + +export function ProposedChangeDiffSummary({ + proposedChangeId, + branchName, +}: ProposedChangeDiffSummaryProps) { + const { error, data, isPending } = useGetDiffSummary({ branch: branchName }); + + if (isPending) { + return ; + } + + if (error) { + return ( + + ); + } + + if (!data) { + return null; + } + + const proposedChangeDetailsPath = `/proposed-changes/${proposedChangeId}`; + + return ( + + + + + + + ); +} diff --git a/frontend/app/src/entities/proposed-changes/ui/proposed-change-item.tsx b/frontend/app/src/entities/proposed-changes/ui/proposed-change-item.tsx index d9db365f1a..2bb49e8953 100644 --- a/frontend/app/src/entities/proposed-changes/ui/proposed-change-item.tsx +++ b/frontend/app/src/entities/proposed-changes/ui/proposed-change-item.tsx @@ -1,7 +1,7 @@ import { Icon } from "@iconify-icon/react"; import { Link } from "react-router"; -import { ARTIFACT_OBJECT, CHECK_OBJECT, TASK_OBJECT } from "@/config/constants"; +import { CHECK_OBJECT, TASK_OBJECT } from "@/config/constants"; import { constructPath } from "@/shared/api/rest/fetch"; import { DateDisplay } from "@/shared/components/display/date-display"; @@ -12,7 +12,7 @@ import { classNames } from "@/shared/utils/common"; import { useObjectsCount } from "@/entities/nodes/object/domain/get-objects-count.query"; import { useObjectTableContext } from "@/entities/nodes/object/ui/object-table/object-table-context"; import type { ProposedChangeItem } from "@/entities/proposed-changes/domain/get-proposed-changes"; -import { ProposedChangeDiffSummary } from "@/entities/proposed-changes/ui/diff-summary"; +import { ProposedChangeDiffSummary } from "@/entities/proposed-changes/ui/diff-summary/proposed-change-diff-summary"; import { ProposedChangesActionCell } from "@/entities/proposed-changes/ui/proposed-changes-actions-cell"; import { useSchema } from "@/entities/schema/ui/hooks/useSchema"; @@ -131,7 +131,6 @@ const ProposedChangesData = ({
-
@@ -161,22 +160,6 @@ const ProposedChangesChecks = ({ validations }: { validations: number }) => { ); }; -const ProposedChangesArtifacts = ({ id }: { id: string }) => { - const { schema } = useSchema(ARTIFACT_OBJECT); - const { data } = useObjectsCount({ - objectKind: ARTIFACT_OBJECT, - filters: [{ name: "object__ids", value: [id] }], - }); - - return ( - - - {data ?? 0} - - - ); -}; - const ProposedChangesTasks = ({ id }: { id: string }) => { const { schema } = useSchema(TASK_OBJECT); const { data } = useObjectsCount({ diff --git a/frontend/app/src/entities/schema/ui/relationship-display.tsx b/frontend/app/src/entities/schema/ui/relationship-display.tsx index bcdcc170a1..275de91a2a 100644 --- a/frontend/app/src/entities/schema/ui/relationship-display.tsx +++ b/frontend/app/src/entities/schema/ui/relationship-display.tsx @@ -4,13 +4,11 @@ import type { components } from "@/shared/api/rest/types.generated"; import { Badge } from "@/shared/components/ui/badge"; import { warnUnexpectedType } from "@/shared/utils/common"; -import { AccordionStyled, ListDisplay, ModelDisplay, PropertyRow } from "./styled"; +import type { RelationshipSchema } from "@/entities/schema/types"; -export const RelationshipDisplay = ({ - relationship, -}: { - relationship: components["schemas"]["RelationshipSchema-Output"]; -}) => { +import { AccordionStyled, ModelDisplay, PropertyRow } from "./styled"; + +export const RelationshipDisplay = ({ relationship }: { relationship: RelationshipSchema }) => { const cardinalityLabel = relationship.cardinality ? getLabelForCardinality(relationship.cardinality) : null; @@ -44,15 +42,13 @@ export const RelationshipDisplay = ({
} /> - } - /> + +
diff --git a/frontend/app/src/entities/schema/ui/styled.tsx b/frontend/app/src/entities/schema/ui/styled.tsx index a295dd33f3..152fed1f9c 100644 --- a/frontend/app/src/entities/schema/ui/styled.tsx +++ b/frontend/app/src/entities/schema/ui/styled.tsx @@ -75,7 +75,7 @@ export const PropertyRow = ({ case "number": return value; case "boolean": - return {value.toString()}; + return {value.toString()}; case "object": if (Array.isArray(value)) { return ( diff --git a/frontend/app/src/pages/branches/details.tsx b/frontend/app/src/pages/branches/details.tsx index 747e94f853..d20cf4a4a8 100644 --- a/frontend/app/src/pages/branches/details.tsx +++ b/frontend/app/src/pages/branches/details.tsx @@ -99,7 +99,7 @@ const BranchContent = ({ branchName }: { branchName: string }) => { case DIFF_TABS.SCHEMA: { return ( { case DIFF_TABS.DATA: { return ( { const [qspTab, setQspTab] = useQueryState( qsp ?? QSP.TAB, - parseAsString.withOptions({ history: "push" }) + parseAsString.withOptions({ history: "push", shallow: false }) ); const handleClick = (tab: Tab, index: number) => { diff --git a/frontend/app/tests/e2e/objects/convert/object-convert.spec.ts b/frontend/app/tests/e2e/objects/convert/object-convert.spec.ts index a6c20b03ff..84cec3f775 100644 --- a/frontend/app/tests/e2e/objects/convert/object-convert.spec.ts +++ b/frontend/app/tests/e2e/objects/convert/object-convert.spec.ts @@ -1,7 +1,7 @@ import { expect, test } from "@playwright/test"; import { ACCOUNT_STATE_PATH } from "../../../constants"; -import { generateRandomBranchName } from "../../../utils"; +import { generateRandomBranchName, saveScreenshotForDocs } from "../../../utils"; import { createBranchAPI, deleteBranchAPI } from "../../utils/graphql"; test.describe("Object details - convert", () => { @@ -22,6 +22,7 @@ test.describe("Object details - convert", () => { await page.goto(`/objects/InfraInterface?branch=${BRANCH_NAME}`); await page.getByRole("link", { name: "atl1-edge1, Ethernet1", exact: true }).click(); await page.getByTestId("object-details-button").click(); + await saveScreenshotForDocs(page, "object_convert_button"); await page.getByRole("menuitem", { name: "Convert object type" }).click(); await expect(page.getByText("SOURCE")).toBeVisible(); await expect(page.getByText("NameEthernet1")).toBeVisible(); @@ -35,6 +36,7 @@ test.describe("Object details - convert", () => { await expect( page.getByRole("combobox").filter({ hasText: "atl1-edge1• Device" }) ).toBeVisible(); + await saveScreenshotForDocs(page, "object_convert_mapping"); await page.getByRole("combobox").filter({ hasText: "atl1-edge1• Device" }).click(); await expect(page.getByRole("option", { name: "atl1-edge1 Matched Device" })).toBeVisible(); diff --git a/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts b/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts index 4c38610fc6..0c22baea96 100644 --- a/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts +++ b/frontend/app/tests/e2e/proposed-changes/proposed-changes.spec.ts @@ -7,14 +7,6 @@ import { createBranchAPI, deleteBranchAPI } from "../utils/graphql"; test.describe("/proposed-changes", () => { test.describe.configure({ mode: "serial" }); - test.beforeEach(async function ({ page }) { - page.on("response", async (response) => { - if (response.status() === 500) { - await expect(response.url()).toBe("This URL responded with a 500 status"); - } - }); - }); - test.describe("when not logged in", () => { test("should not be able to create a proposed changes", async ({ page }) => { 
await page.goto("/proposed-changes"); diff --git a/frontend/app/tests/e2e/proposed-changes/proposed-changes_diff.spec.ts b/frontend/app/tests/e2e/proposed-changes/proposed-changes_diff.spec.ts index c0e1152ed7..a33ec806ed 100644 --- a/frontend/app/tests/e2e/proposed-changes/proposed-changes_diff.spec.ts +++ b/frontend/app/tests/e2e/proposed-changes/proposed-changes_diff.spec.ts @@ -6,14 +6,6 @@ test.describe("/proposed-changes diff data", () => { test.describe.configure({ mode: "serial" }); test.use({ storageState: ACCOUNT_STATE_PATH.ADMIN }); - test.beforeEach(async function ({ page }) { - page.on("response", async (response) => { - if (response.status() === 500) { - await expect(response.url()).toBe("This URL responded with a 500 status"); - } - }); - }); - test("should verify the diff data with conflicts", async ({ page }) => { await test.step("create a new proposed change with reviewers", async () => { await page.goto("/proposed-changes"); @@ -24,7 +16,7 @@ test.describe("/proposed-changes diff data", () => { await page.getByLabel("Reviewers").click(); await page.getByRole("option", { name: "Admin" }).click(); await page.getByLabel("Reviewers").click(); - await page.getByRole("button", { name: "Open" }).click(); + await page.getByRole("button", { name: "Open", exact: true }).click(); await expect(page.getByText("Proposed change created")).toBeVisible(); await page.getByText("Data").click(); }); @@ -41,12 +33,16 @@ test.describe("/proposed-changes diff data", () => { await expect( page.getByText("UpdatedInterfaceL3Ethernet1 main den1-maintenance-") ).toBeVisible(); - await page.getByText("UpdatedDeviceden1-edge1").click(); + await page.getByLabel("diff tree").getByText("den1-edge1").click(); await page .getByText( "main den1-maintenance-conflictstatusConflictactiveprovisioningmaintenanceChoose" ) .click(); + const hash = await page.evaluate(() => window.location.hash); + const highlightedNodeDiff = page.locator(`id=${hash.slice(1)}`); + await expect(highlightedNodeDiff).toBeInViewport(); + await expect(highlightedNodeDiff).toContainClass("ring-2 ring-custom-blue-500"); }); await test.step("resolve conflict", async () => { diff --git a/poetry.lock b/poetry.lock index 53a06fac73..ca004f95be 100644 --- a/poetry.lock +++ b/poetry.lock @@ -750,7 +750,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {dev = "sys_platform == \"win32\" or platform_system == \"Windows\""} +markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "contourpy" @@ -5378,6 +5378,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = 
"sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, @@ -5386,6 +5387,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, @@ -5394,6 +5396,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, @@ -5402,6 +5405,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = 
"ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, @@ -5410,6 +5414,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, @@ -6031,31 +6036,31 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uv" -version = "0.8.13" +version = "0.9.5" description = "An extremely fast Python package and project manager, written in Rust." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "uv-0.8.13-py3-none-linux_armv6l.whl", hash = "sha256:3b5c6e44238007ec1d25212cafe1b37a8506d425d1dd74a267cb9072a61930f9"}, - {file = "uv-0.8.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2945c32b8fcf23807ef1f74c390795e2b00371c53b94c015cc6e7b0cfbab9d94"}, - {file = "uv-0.8.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:73459fe1403b1089853071db6770450dc03e4058848f7146d88cff5f1c352743"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:854c4e75024a4894477bf61684b2872b83c77ca87d1bad62692bcc31200619c3"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:28c8d4560c673ff5c798f2f4422281840728f46ebf1946345b65d065f8344c03"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6c508aa9c5210577008e1919b532e38356fe68712179399f00462b3e78fd845"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3bac51ea503d97f371222f23e845fc4ab95465ac3e958c7589d6743c75445b71"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6d37547947fcae57244b4d1f3b62fba55f4a85d3e45e7284a93b6cd5bedca4"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3735a452cdc3168932d128891d7e8866b4a2d052283c6da5ccfe0b038d1cf8bd"}, - {file = "uv-0.8.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2113cd877974b68ea2af64a2f2cc23708ba97066046e78efb72ba94e5fef617a"}, - {file = "uv-0.8.13-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:4c2c5e5962239ecaff6444d5bc22422a9bd2da25a80adc6ab14cb42e4461b1cf"}, - {file = "uv-0.8.13-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:eb90089624d92d57b8582f708973db8988e09dba6bae83991dba20731d82eb6a"}, - {file = 
"uv-0.8.13-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:cf3ce98404ddc1e11cd2c2604668f8f81219cf00bb1227b792fdf5dbb4faf31a"}, - {file = "uv-0.8.13-py3-none-musllinux_1_1_i686.whl", hash = "sha256:8a3739540f8b0b5258869b1671185d55daacfa4609eaffd235573ac938ec01a6"}, - {file = "uv-0.8.13-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:18a502328545af511039c7b7c602a0aa89eeff23b1221a1f56d99b3a3fecfddd"}, - {file = "uv-0.8.13-py3-none-win32.whl", hash = "sha256:d22fa55580b224779279b98e0b23cbc45e51837e1fac616d7c5d03aff668a998"}, - {file = "uv-0.8.13-py3-none-win_amd64.whl", hash = "sha256:20862f612de38f6dea55d40467a29f3cb621b256a4b5891ae55debbbdf1db2b4"}, - {file = "uv-0.8.13-py3-none-win_arm64.whl", hash = "sha256:404ca19b2d860ab661e1d78633f594e994f8422af8772ad237d763fe353da2ab"}, - {file = "uv-0.8.13.tar.gz", hash = "sha256:a4438eca3d301183c52994a6d2baff70fd1840421a83446f3cabb1d0d0b50aff"}, + {file = "uv-0.9.5-py3-none-linux_armv6l.whl", hash = "sha256:f8eb34ebebac4b45334ce7082cca99293b71fb32b164651f1727c8a640e5b387"}, + {file = "uv-0.9.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:922cd784cce36bbdc7754b590d28c276698c85791c18cd4c6a7e917db4480440"}, + {file = "uv-0.9.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8603bb902e578463c50c3ddd4ee376ba4172ccdf4979787f8948747d1bb0e18b"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:48a3542835d37882ff57d1ff91b757085525d98756712fa61cf9941d3dda8ebf"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:21452ece590ddb90e869a478ca4c2ba70be180ec0d6716985ee727b9394c8aa5"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb31c9896dc2c88f6a9f1d693be2409fe2fc2e3d90827956e4341c2b2171289"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:02db727beb94a2137508cee5a785c3465d150954ca9abdff2d8157c76dea163e"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c465f2e342cab908849b8ce83e14fd4cf75f5bed55802d0acf1399f9d02f92d9"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:133e2614e1ff3b34c2606595d8ae55710473ebb7516bfa5708afc00315730cd1"}, + {file = "uv-0.9.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6507bbbcd788553ec4ad5a96fa19364dc0f58b023e31d79868773559a83ec181"}, + {file = "uv-0.9.5-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6a046c2e833169bf26f461286aab58a2ba8d48ed2220bfcf119dcfaf87163116"}, + {file = "uv-0.9.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9fc13b4b943d19adac52d7dcd2159e96ab2e837ac49a79e20714ed25f1f1b7f9"}, + {file = "uv-0.9.5-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:5bb4996329ba47e7e775baba4a47e85092aa491d708a66e63b564e9b306bfb7e"}, + {file = "uv-0.9.5-py3-none-musllinux_1_1_i686.whl", hash = "sha256:6452eb6257e37e1ebd97430b5f5e10419da2c3ca35b4086540ec4163b4b2f25c"}, + {file = "uv-0.9.5-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:3a4ecbfdcbd3dae4190428874762c791e05d2c97ff2872bf6c0a30ed5c4ea9ca"}, + {file = "uv-0.9.5-py3-none-win32.whl", hash = "sha256:0316493044035098666d6e99c14bd61b352555d9717d57269f4ce531855330fa"}, + {file = "uv-0.9.5-py3-none-win_amd64.whl", hash = "sha256:48a12390421f91af8a8993cf15c38297c0bb121936046286e287975b2fbf1789"}, + {file = "uv-0.9.5-py3-none-win_arm64.whl", hash = "sha256:c966e3a4fe4de3b0a6279d0a835c79f9cddbb3693f52d140910cbbed177c5742"}, + {file = 
"uv-0.9.5.tar.gz", hash = "sha256:d8835d2c034421ac2235fb658bb4f669a301a0f1eb00a8430148dd8461b65641"}, ] [[package]] diff --git a/python_testcontainers/infrahub_testcontainers/container.py b/python_testcontainers/infrahub_testcontainers/container.py index 10c5aa71e8..605e263fc3 100644 --- a/python_testcontainers/infrahub_testcontainers/container.py +++ b/python_testcontainers/infrahub_testcontainers/container.py @@ -152,6 +152,8 @@ def create_env_file(self, directory: Path, version: str) -> Path: "INFRAHUB_TESTING_TASKMGR_BACKGROUND_SVC_REPLICAS": "1", "PREFECT_MESSAGING_BROKER": "prefect_redis.messaging", "PREFECT_MESSAGING_CACHE": "prefect_redis.messaging", + "PREFECT_SERVER_EVENTS_CAUSAL_ORDERING": "prefect_redis.ordering", + "PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE": "prefect_redis.lease_storage", "PREFECT__SERVER_WEBSERVER_ONLY": "true", "PREFECT_API_DATABASE_MIGRATE_ON_START": "false", "PREFECT_API_BLOCKS_REGISTER_ON_START": "false", diff --git a/python_testcontainers/infrahub_testcontainers/docker-compose-cluster.test.yml b/python_testcontainers/infrahub_testcontainers/docker-compose-cluster.test.yml index 0b8607a3be..50c72f193b 100644 --- a/python_testcontainers/infrahub_testcontainers/docker-compose-cluster.test.yml +++ b/python_testcontainers/infrahub_testcontainers/docker-compose-cluster.test.yml @@ -184,6 +184,8 @@ services: PREFECT_MESSAGING_BROKER: PREFECT_MESSAGING_CACHE: + PREFECT_SERVER_EVENTS_CAUSAL_ORDERING: + PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE: PREFECT__SERVER_WEBSERVER_ONLY: PREFECT_API_DATABASE_MIGRATE_ON_START: PREFECT_API_BLOCKS_REGISTER_ON_START: @@ -225,6 +227,8 @@ services: INFRAHUB_CACHE_ADDRESS: ${INFRAHUB_TESTING_CACHE_ADDRESS} PREFECT_MESSAGING_BROKER: prefect_redis.messaging PREFECT_MESSAGING_CACHE: prefect_redis.messaging + PREFECT_SERVER_EVENTS_CAUSAL_ORDERING: prefect_redis.ordering + PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE: prefect_redis.lease_storage PREFECT_REDIS_MESSAGING_HOST: "${INFRAHUB_TESTING_CACHE_ADDRESS:-cache}" PREFECT_REDIS_MESSAGING_DB: "1" PREFECT_REDIS_MESSAGING_CONSUMER_MIN_IDLE_TIME: "30" diff --git a/python_testcontainers/infrahub_testcontainers/docker-compose.test.yml b/python_testcontainers/infrahub_testcontainers/docker-compose.test.yml index c9edbf36e3..7b32f704fd 100644 --- a/python_testcontainers/infrahub_testcontainers/docker-compose.test.yml +++ b/python_testcontainers/infrahub_testcontainers/docker-compose.test.yml @@ -95,6 +95,8 @@ services: PREFECT_MESSAGING_BROKER: PREFECT_MESSAGING_CACHE: + PREFECT_SERVER_EVENTS_CAUSAL_ORDERING: + PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE: PREFECT__SERVER_WEBSERVER_ONLY: PREFECT_API_DATABASE_MIGRATE_ON_START: PREFECT_API_BLOCKS_REGISTER_ON_START: @@ -136,6 +138,8 @@ services: INFRAHUB_CACHE_ADDRESS: ${INFRAHUB_TESTING_CACHE_ADDRESS} PREFECT_MESSAGING_BROKER: prefect_redis.messaging PREFECT_MESSAGING_CACHE: prefect_redis.messaging + PREFECT_SERVER_EVENTS_CAUSAL_ORDERING: prefect_redis.ordering + PREFECT_SERVER_CONCURRENCY_LEASE_STORAGE: prefect_redis.lease_storage PREFECT_REDIS_MESSAGING_HOST: "${INFRAHUB_TESTING_CACHE_ADDRESS:-cache}" PREFECT_REDIS_MESSAGING_DB: "1" PREFECT_REDIS_MESSAGING_CONSUMER_MIN_IDLE_TIME: "30"