diff --git a/changelog/236.fixed.md b/changelog/236.fixed.md
new file mode 100644
index 00000000..510c0a58
--- /dev/null
+++ b/changelog/236.fixed.md
@@ -0,0 +1 @@
+Respect default branch for client.query_gql_query() and client.set_context_properties()
diff --git a/changelog/374.fixed.md b/changelog/374.fixed.md
new file mode 100644
index 00000000..0751c038
--- /dev/null
+++ b/changelog/374.fixed.md
@@ -0,0 +1 @@
+Fix branch creation with the sync client while setting `wait_until_completion=False`
\ No newline at end of file
diff --git a/changelog/398.fixed.md b/changelog/398.fixed.md
new file mode 100644
index 00000000..18de648e
--- /dev/null
+++ b/changelog/398.fixed.md
@@ -0,0 +1 @@
+Fix `infrahubctl info` command when run as an anonymous user
\ No newline at end of file
diff --git a/docs/docs/python-sdk/topics/object_file.mdx b/docs/docs/python-sdk/topics/object_file.mdx
index 7488c02f..aebacb83 100644
--- a/docs/docs/python-sdk/topics/object_file.mdx
+++ b/docs/docs/python-sdk/topics/object_file.mdx
@@ -38,7 +38,7 @@ Multiple object files can be loaded at once by specifying the path to multiple f
 
 The `object load` command will create/update the objects using an `Upsert` operation. All objects previously loaded will NOT be deleted in the Infrahub instance. Also, if some objects present in different files are identical and dependent on each other, the `object load` command will NOT calculate the dependencies between the objects and as such it's the responsibility of the users to execute the command in the right order.
 
-> Object files can also be loaded into Infrahub when using external Git repositories. To see how to do this, please refer to the [.infrahub.yml](https://docs.infrahub.app/topics/infrahub-yml#objects) documentation.
+> Object files can also be loaded into Infrahub when using external Git repositories. To see how to do this, please refer to the [.infrahub.yml](https://docs.infrahub.app/topics/infrahub-yml) documentation.
 
 ### Validate the format of object files
 
diff --git a/infrahub_sdk/branch.py b/infrahub_sdk/branch.py
index 9d7de1fb..2403e1ef 100644
--- a/infrahub_sdk/branch.py
+++ b/infrahub_sdk/branch.py
@@ -292,13 +292,14 @@ def create(
             },
         }
 
-        query = Mutation(mutation="BranchCreate", input_data=input_data, query=MUTATION_QUERY_DATA)
+        mutation_query = MUTATION_QUERY_TASK if background_execution else MUTATION_QUERY_DATA
+        query = Mutation(mutation="BranchCreate", input_data=input_data, query=mutation_query)
         response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create")
 
         # Make sure server version is recent enough to support background execution, as previously
         # using background_execution=True had no effect.
         if background_execution and "task" in response["BranchCreate"]:
-            return BranchData(**response["BranchCreate"]["task"]["id"])
+            return response["BranchCreate"]["task"]["id"]
         return BranchData(**response["BranchCreate"]["object"])
 
     def delete(self, branch_name: str) -> bool:
diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py
index 4a6ad81b..671a2f5f 100644
--- a/infrahub_sdk/client.py
+++ b/infrahub_sdk/client.py
@@ -209,7 +209,7 @@ def set_context_properties(
             delete_unused_nodes=delete_unused_nodes,
             group_type=group_type,
             group_params=group_params,
-            branch=branch,
+            branch=branch or self.default_branch,
         )
 
     def _graphql_url(
@@ -310,8 +310,7 @@ async def get_version(self) -> str:
 
     async def get_user(self) -> dict:
         """Return user information"""
-        user_info = await self.execute_graphql(query=QUERY_USER)
-        return user_info
+        return await self.execute_graphql(query=QUERY_USER)
 
     async def get_user_permissions(self) -> dict:
         """Return user permissions"""
@@ -540,6 +539,7 @@ async def _process_nodes_and_relationships(
         schema_kind: str,
         branch: str,
         prefetch_relationships: bool,
+        include: list[str] | None,
         timeout: int | None = None,
     ) -> ProcessRelationsNode:
         """Processes InfrahubNode and their Relationships from the GraphQL query response.
@@ -564,9 +564,12 @@ async def _process_nodes_and_relationships(
             node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
             nodes.append(node)
 
-            if prefetch_relationships:
+            if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
                 await node._process_relationships(
-                    node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout
+                    node_data=item,
+                    branch=branch,
+                    related_nodes=related_nodes,
+                    timeout=timeout,
                 )
 
         return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes)
@@ -816,6 +819,7 @@ async def process_page(page_offset: int, page_number: int) -> tuple[dict, Proces
                 branch=branch,
                 prefetch_relationships=prefetch_relationships,
                 timeout=timeout,
+                include=include,
             )
 
             return response, process_result
@@ -1103,13 +1107,13 @@ async def query_gql_query(
     ) -> dict:
         url = f"{self.address}/api/query/{name}"
         url_params = copy.deepcopy(params or {})
+        url_params["branch"] = branch_name or self.default_branch
+
         headers = copy.copy(self.headers or {})
 
         if self.insert_tracker and tracker:
             headers["X-Infrahub-Tracker"] = tracker
 
-        if branch_name:
-            url_params["branch"] = branch_name
         if at:
             url_params["at"] = at
 
@@ -1565,8 +1569,7 @@ def get_version(self) -> str:
 
     def get_user(self) -> dict:
         """Return user information"""
-        user_info = self.execute_graphql(query=QUERY_USER)
-        return user_info
+        return self.execute_graphql(query=QUERY_USER)
 
     def get_user_permissions(self) -> dict:
         """Return user permissions"""
@@ -1831,6 +1834,7 @@ def _process_nodes_and_relationships(
         schema_kind: str,
         branch: str,
         prefetch_relationships: bool,
+        include: list[str] | None,
         timeout: int | None = None,
     ) -> ProcessRelationsNodeSync:
         """Processes InfrahubNodeSync and their Relationships from the GraphQL query response.
@@ -1855,7 +1859,7 @@ def _process_nodes_and_relationships(
             node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
             nodes.append(node)
 
-            if prefetch_relationships:
+            if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
                 node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout)
 
         return ProcessRelationsNodeSync(nodes=nodes, related_nodes=related_nodes)
@@ -1980,6 +1984,7 @@ def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelat
                 branch=branch,
                 prefetch_relationships=prefetch_relationships,
                 timeout=timeout,
+                include=include,
             )
 
             return response, process_result
@@ -2242,13 +2247,13 @@ def query_gql_query(
     ) -> dict:
         url = f"{self.address}/api/query/{name}"
         url_params = copy.deepcopy(params or {})
+        url_params["branch"] = branch_name or self.default_branch
+
         headers = copy.copy(self.headers or {})
 
         if self.insert_tracker and tracker:
             headers["X-Infrahub-Tracker"] = tracker
 
-        if branch_name:
-            url_params["branch"] = branch_name
         if at:
             url_params["at"] = at
         if subscribers:
diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py
index 605743fa..bc6cc3d3 100644
--- a/infrahub_sdk/ctl/cli_commands.py
+++ b/infrahub_sdk/ctl/cli_commands.py
@@ -409,7 +409,6 @@ def info( # noqa: PLR0915
     _: str = CONFIG_PARAM,
 ) -> None:
     """Display the status of the Python SDK."""
-
     info: dict[str, Any] = {
         "error": None,
         "status": ":x:",
@@ -417,12 +416,17 @@
         "user_info": {},
         "groups": {},
     }
+    client = initialize_client_sync()
+    fetch_user_details = bool(client.config.username) or bool(client.config.api_token)
+
     try:
-        client = initialize_client_sync()
         info["infrahub_version"] = client.get_version()
-        info["user_info"] = client.get_user()
+
+        if fetch_user_details:
+            info["user_info"] = client.get_user()
+            info["groups"] = client.get_user_permissions()
+
         info["status"] = ":white_heavy_check_mark:"
-        info["groups"] = client.get_user_permissions()
     except Exception as e:
         info["error"] = f"{e!s} ({e.__class__.__name__})"
 
@@ -469,7 +473,7 @@ def info( # noqa: PLR0915
     pretty_model = Pretty(client.config.model_dump(), expand_all=True)
     layout["client_info"].update(Panel(pretty_model, title="Client Info"))
 
-    # Infrahub information planel
+    # Infrahub information panel
     infrahub_info = Table(show_header=False, box=None)
     if info["user_info"]:
         infrahub_info.add_row("User:", info["user_info"]["AccountProfile"]["display_label"])
@@ -487,6 +491,8 @@ def info( # noqa: PLR0915
             infrahub_info.add_row("Groups:", "")
             for group, roles in groups.items():
                 infrahub_info.add_row("", group, ", ".join(roles))
+    else:
+        infrahub_info.add_row("User:", "anonymous")
 
     layout["infrahub_info"].update(Panel(infrahub_info, title="Infrahub Info"))
 
diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py
index 3e0aa80e..e6d129c3 100644
--- a/infrahub_sdk/node/node.py
+++ b/infrahub_sdk/node/node.py
@@ -748,12 +748,11 @@ async def generate_query_data_node(
                 continue
 
             peer_data: dict[str, Any] = {}
-            if rel_schema and prefetch_relationships:
+            should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include)
+            if rel_schema and should_fetch_relationship:
                 peer_schema = await self._client.schema.get(kind=rel_schema.peer, branch=self._branch)
                 peer_node = InfrahubNode(client=self._client, schema=peer_schema, branch=self._branch)
                 peer_data = await peer_node.generate_query_data_node(
-                    include=include,
-                    exclude=exclude,
                     property=property,
                 )
 
@@ -892,7 +891,11 @@ async def update(
         await self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout)
 
     async def _process_relationships(
-        self, node_data: dict[str, Any], branch: str, related_nodes: list[InfrahubNode], timeout: int | None = None
+        self,
+        node_data: dict[str, Any],
+        branch: str,
+        related_nodes: list[InfrahubNode],
+        timeout: int | None = None,
     ) -> None:
         """Processes the Relationships of a InfrahubNode and add Related Nodes to a list.
 
@@ -1369,7 +1372,8 @@ def generate_query_data_node(
                 continue
 
             peer_data: dict[str, Any] = {}
-            if rel_schema and prefetch_relationships:
+            should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include)
+            if rel_schema and should_fetch_relationship:
                 peer_schema = self._client.schema.get(kind=rel_schema.peer, branch=self._branch)
                 peer_node = InfrahubNodeSync(client=self._client, schema=peer_schema, branch=self._branch)
                 peer_data = peer_node.generate_query_data_node(include=include, exclude=exclude, property=property)
diff --git a/tests/integration/test_node.py b/tests/integration/test_node.py
index 3716dc1a..652e7203 100644
--- a/tests/integration/test_node.py
+++ b/tests/integration/test_node.py
@@ -85,6 +85,40 @@ async def test_node_create_with_relationships_using_related_node(
         assert node_after.owner.peer.id == person_joe.id
         assert node_after.owner.peer.typename == "TestingPerson"
 
+    async def test_node_filters_include(
+        self,
+        default_branch: str,
+        client: InfrahubClient,
+        initial_schema: None,
+        manufacturer_mercedes,
+        person_joe,
+        tag_red,
+    ) -> None:
+        car = await client.create(
+            kind=TESTING_CAR,
+            name="Tiguan2",
+            color="Black",
+            manufacturer=manufacturer_mercedes,
+            owner=person_joe,
+            tags=[tag_red],
+        )
+        await car.save(allow_upsert=True)
+        assert car.id is not None
+
+        # Clear the store, as when we call `owner.peer` we rely on the peer having been stored in the store.
+        client.store._branches = {}
+        node_after = await client.get(kind=TESTING_CAR, id=car.id)
+
+        with pytest.raises(NodeNotFoundError, match=f"Unable to find the node '{person_joe.id}' in the store"):
+            _ = node_after.owner.peer
+
+        assert len(node_after.tags.peers) == 0
+
+        # Test both one and many relationships
+        node_after = await client.get(kind=TESTING_CAR, id=car.id, include=["tags", "owner"])
+        assert [tag.id for tag in node_after.tags.peers] == [tag_red.id]
+        assert node_after.owner.peer.id == person_joe.id, f"{person_joe.id=}"
+
     async def test_node_update_with_original_data(
         self,
         default_branch: str,
diff --git a/tests/unit/ctl/test_cli.py b/tests/unit/ctl/test_cli.py
index be0944b0..c2b856d3 100644
--- a/tests/unit/ctl/test_cli.py
+++ b/tests/unit/ctl/test_cli.py
@@ -33,7 +33,7 @@ def test_version_command() -> None:
 
 
 def test_info_command_success(mock_query_infrahub_version, mock_query_infrahub_user) -> None:
-    result = runner.invoke(app, ["info"])
+    result = runner.invoke(app, ["info"], env={"INFRAHUB_API_TOKEN": "foo"})
     assert result.exit_code == 0
     for expected in ["Connection Status", "Python Version", "SDK Version", "Infrahub Version"]:
         assert expected in result.stdout, f"'{expected}' not found in info command output"
@@ -46,15 +46,16 @@ def test_info_command_failure() -> None:
 
 
 def test_info_detail_command_success(mock_query_infrahub_version, mock_query_infrahub_user) -> None:
+    result = runner.invoke(app, ["info", "--detail"], env={"INFRAHUB_API_TOKEN": "foo"})
+    assert result.exit_code == 0
+    for expected in ["Connection Status", "Version Information", "Client Info", "Infrahub Info", "Groups:"]:
+        assert expected in result.stdout, f"'{expected}' not found in detailed info command output"
+
+
+def test_anonymous_info_detail_command_success(mock_query_infrahub_version) -> None:
     result = runner.invoke(app, ["info", "--detail"])
     assert result.exit_code == 0
-    for expected in [
-        "Connection Status",
-        "Version Information",
-        "Client Info",
-        "Infrahub Info",
-        "Groups:",
-    ]:
+    for expected in ["Connection Status", "Version Information", "Client Info", "Infrahub Info", "anonymous"]:
         assert expected in result.stdout, f"'{expected}' not found in detailed info command output"
 
 
diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py
index 6943900e..5f0d7c2a 100644
--- a/tests/unit/sdk/conftest.py
+++ b/tests/unit/sdk/conftest.py
@@ -33,6 +33,11 @@ async def client() -> InfrahubClient:
     return InfrahubClient(config=Config(address="http://mock", insert_tracker=True, pagination_size=3))
 
 
+@pytest.fixture
+async def client_sync() -> InfrahubClientSync:
+    return InfrahubClientSync(config=Config(address="http://mock", insert_tracker=True, pagination_size=3))
+
+
 @pytest.fixture
 async def clients() -> BothClients:
     both = BothClients(
diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py
index 33c43eb9..c5c75052 100644
--- a/tests/unit/sdk/test_node.py
+++ b/tests/unit/sdk/test_node.py
@@ -53,6 +53,23 @@
 ]
 
 
+async def set_builtin_tag_schema_cache(client) -> None:
+    # Set the tag schema in the cache to avoid needing to request it from the server.
+    builtin_tag_schema = {
+        "version": "1.0",
+        "nodes": [
+            {
+                "name": "Tag",
+                "namespace": "Builtin",
+                "default_filter": "name__value",
+                "display_label": "name__value",
+                "branch": "aware",
+            }
+        ],
+    }
+    client.schema.set_cache(builtin_tag_schema)
+
+
 async def test_method_sanity() -> None:
     """Validate that there is at least one public method and that both clients look the same."""
     assert async_node_methods
@@ -1055,12 +1072,19 @@ async def test_query_data_generic_fragment(clients, mock_schema_query_02, client
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_include_property(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_include_property(
+    client,
+    client_sync,
+    location_schema: NodeSchemaAPI,
+    client_type,
+) -> None:
     if client_type == "standard":
+        await set_builtin_tag_schema_cache(client)
         node = InfrahubNode(client=client, schema=location_schema)
         data = await node.generate_query_data(include=["tags"], property=True)
     else:
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        await set_builtin_tag_schema_cache(client_sync)
+        node = InfrahubNodeSync(client=client_sync, schema=location_schema)
         data = node.generate_query_data(include=["tags"], property=True)
 
     assert data == {
@@ -1178,12 +1202,19 @@ async def test_query_data_include_property(client, location_schema: NodeSchemaAP
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_include(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_include(
+    client,
+    client_sync,
+    location_schema: NodeSchemaAPI,
+    client_type,
+) -> None:
     if client_type == "standard":
+        await set_builtin_tag_schema_cache(client)
         node = InfrahubNode(client=client, schema=location_schema)
         data = await node.generate_query_data(include=["tags"])
     else:
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        await set_builtin_tag_schema_cache(client_sync)
+        node = InfrahubNodeSync(client=client_sync, schema=location_schema)
         data = node.generate_query_data(include=["tags"])
 
     assert data == {
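
Reviewer note: below is a minimal usage sketch, not part of the patch, of the two behaviour changes exercised above. It assumes a reachable Infrahub instance, the import paths shown, and a hypothetical "TestingCar" kind with "owner" and "tags" relationships mirroring the integration test; the node id and stored query name are placeholders.

# Illustrative sketch only; names marked as placeholders are assumptions, not part of this diff.
import asyncio

from infrahub_sdk import Config, InfrahubClient


async def main() -> None:
    client = InfrahubClient(config=Config(address="http://localhost:8000"))

    # With this change, naming a relationship in `include` is enough to have its peers
    # prefetched, even when prefetch_relationships is left at its default.
    car = await client.get(kind="TestingCar", id="<car-id>", include=["tags", "owner"])
    print(car.owner.peer.id)  # peer is resolvable without a separate fetch
    print([tag.id for tag in car.tags.peers])

    # query_gql_query() now falls back to client.default_branch when branch_name is omitted,
    # instead of leaving the branch parameter out of the request entirely.
    result = await client.query_gql_query(name="my_stored_query")
    print(result)


asyncio.run(main())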