Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions changelog/236.fixed.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Respect default branch for client.query_gql_query() and client.set_context_properties()
1 change: 1 addition & 0 deletions changelog/374.fixed.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix branch creation with the sync client while setting `wait_until_completion=False`
1 change: 1 addition & 0 deletions changelog/398.fixed.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix `infrahubctl info` command when run as an anonymous user
2 changes: 1 addition & 1 deletion docs/docs/python-sdk/topics/object_file.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ Multiple object files can be loaded at once by specifying the path to multiple f
The `object load` command will create/update the objects using an `Upsert` operation. All objects previously loaded will NOT be deleted in the Infrahub instance.
Also, if some objects present in different files are identical and dependent on each other, the `object load` command will NOT calculate the dependencies between the objects and as such it's the responsibility of the users to execute the command in the right order.

> Object files can also be loaded into Infrahub when using external Git repositories. To see how to do this, please refer to the [.infrahub.yml](https://docs.infrahub.app/topics/infrahub-yml#objects) documentation.
> Object files can also be loaded into Infrahub when using external Git repositories. To see how to do this, please refer to the [.infrahub.yml](https://docs.infrahub.app/topics/infrahub-yml) documentation.

### Validate the format of object files

Expand Down
5 changes: 3 additions & 2 deletions infrahub_sdk/branch.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,13 +292,14 @@ def create(
},
}

query = Mutation(mutation="BranchCreate", input_data=input_data, query=MUTATION_QUERY_DATA)
mutation_query = MUTATION_QUERY_TASK if background_execution else MUTATION_QUERY_DATA
query = Mutation(mutation="BranchCreate", input_data=input_data, query=mutation_query)
response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create")

# Make sure server version is recent enough to support background execution, as previously
# using background_execution=True had no effect.
if background_execution and "task" in response["BranchCreate"]:
return BranchData(**response["BranchCreate"]["task"]["id"])
return response["BranchCreate"]["task"]["id"]
return BranchData(**response["BranchCreate"]["object"])

def delete(self, branch_name: str) -> bool:
Expand Down
29 changes: 17 additions & 12 deletions infrahub_sdk/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ def set_context_properties(
delete_unused_nodes=delete_unused_nodes,
group_type=group_type,
group_params=group_params,
branch=branch,
branch=branch or self.default_branch,
)

def _graphql_url(
Expand Down Expand Up @@ -310,8 +310,7 @@ async def get_version(self) -> str:

async def get_user(self) -> dict:
"""Return user information"""
user_info = await self.execute_graphql(query=QUERY_USER)
return user_info
return await self.execute_graphql(query=QUERY_USER)

async def get_user_permissions(self) -> dict:
"""Return user permissions"""
Expand Down Expand Up @@ -540,6 +539,7 @@ async def _process_nodes_and_relationships(
schema_kind: str,
branch: str,
prefetch_relationships: bool,
include: list[str] | None,
timeout: int | None = None,
) -> ProcessRelationsNode:
"""Processes InfrahubNode and their Relationships from the GraphQL query response.
Expand All @@ -564,9 +564,12 @@ async def _process_nodes_and_relationships(
node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
nodes.append(node)

if prefetch_relationships:
if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
await node._process_relationships(
node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout
node_data=item,
branch=branch,
related_nodes=related_nodes,
timeout=timeout,
)

return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes)
Expand Down Expand Up @@ -816,6 +819,7 @@ async def process_page(page_offset: int, page_number: int) -> tuple[dict, Proces
branch=branch,
prefetch_relationships=prefetch_relationships,
timeout=timeout,
include=include,
)
return response, process_result

Expand Down Expand Up @@ -1103,13 +1107,13 @@ async def query_gql_query(
) -> dict:
url = f"{self.address}/api/query/{name}"
url_params = copy.deepcopy(params or {})
url_params["branch"] = branch_name or self.default_branch

headers = copy.copy(self.headers or {})

if self.insert_tracker and tracker:
headers["X-Infrahub-Tracker"] = tracker

if branch_name:
url_params["branch"] = branch_name
if at:
url_params["at"] = at

Expand Down Expand Up @@ -1565,8 +1569,7 @@ def get_version(self) -> str:

def get_user(self) -> dict:
"""Return user information"""
user_info = self.execute_graphql(query=QUERY_USER)
return user_info
return self.execute_graphql(query=QUERY_USER)

def get_user_permissions(self) -> dict:
"""Return user permissions"""
Expand Down Expand Up @@ -1831,6 +1834,7 @@ def _process_nodes_and_relationships(
schema_kind: str,
branch: str,
prefetch_relationships: bool,
include: list[str] | None,
timeout: int | None = None,
) -> ProcessRelationsNodeSync:
"""Processes InfrahubNodeSync and their Relationships from the GraphQL query response.
Expand All @@ -1855,7 +1859,7 @@ def _process_nodes_and_relationships(
node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
nodes.append(node)

if prefetch_relationships:
if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout)

return ProcessRelationsNodeSync(nodes=nodes, related_nodes=related_nodes)
Expand Down Expand Up @@ -1980,6 +1984,7 @@ def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelat
branch=branch,
prefetch_relationships=prefetch_relationships,
timeout=timeout,
include=include,
)
return response, process_result

Expand Down Expand Up @@ -2242,13 +2247,13 @@ def query_gql_query(
) -> dict:
url = f"{self.address}/api/query/{name}"
url_params = copy.deepcopy(params or {})
url_params["branch"] = branch_name or self.default_branch

headers = copy.copy(self.headers or {})

if self.insert_tracker and tracker:
headers["X-Infrahub-Tracker"] = tracker

if branch_name:
url_params["branch"] = branch_name
if at:
url_params["at"] = at
if subscribers:
Expand Down
16 changes: 11 additions & 5 deletions infrahub_sdk/ctl/cli_commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,20 +409,24 @@ def info( # noqa: PLR0915
_: str = CONFIG_PARAM,
) -> None:
"""Display the status of the Python SDK."""

info: dict[str, Any] = {
"error": None,
"status": ":x:",
"infrahub_version": "N/A",
"user_info": {},
"groups": {},
}
client = initialize_client_sync()
fetch_user_details = bool(client.config.username) or bool(client.config.api_token)

try:
client = initialize_client_sync()
info["infrahub_version"] = client.get_version()
info["user_info"] = client.get_user()

if fetch_user_details:
info["user_info"] = client.get_user()
info["groups"] = client.get_user_permissions()

info["status"] = ":white_heavy_check_mark:"
info["groups"] = client.get_user_permissions()
except Exception as e:
info["error"] = f"{e!s} ({e.__class__.__name__})"

Expand Down Expand Up @@ -469,7 +473,7 @@ def info( # noqa: PLR0915
pretty_model = Pretty(client.config.model_dump(), expand_all=True)
layout["client_info"].update(Panel(pretty_model, title="Client Info"))

# Infrahub information planel
# Infrahub information panel
infrahub_info = Table(show_header=False, box=None)
if info["user_info"]:
infrahub_info.add_row("User:", info["user_info"]["AccountProfile"]["display_label"])
Expand All @@ -487,6 +491,8 @@ def info( # noqa: PLR0915
infrahub_info.add_row("Groups:", "")
for group, roles in groups.items():
infrahub_info.add_row("", group, ", ".join(roles))
else:
infrahub_info.add_row("User:", "anonymous")

layout["infrahub_info"].update(Panel(infrahub_info, title="Infrahub Info"))

Expand Down
14 changes: 9 additions & 5 deletions infrahub_sdk/node/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -748,12 +748,11 @@ async def generate_query_data_node(
continue

peer_data: dict[str, Any] = {}
if rel_schema and prefetch_relationships:
should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include)
if rel_schema and should_fetch_relationship:
peer_schema = await self._client.schema.get(kind=rel_schema.peer, branch=self._branch)
peer_node = InfrahubNode(client=self._client, schema=peer_schema, branch=self._branch)
peer_data = await peer_node.generate_query_data_node(
include=include,
exclude=exclude,
property=property,
)

Expand Down Expand Up @@ -892,7 +891,11 @@ async def update(
await self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout)

async def _process_relationships(
self, node_data: dict[str, Any], branch: str, related_nodes: list[InfrahubNode], timeout: int | None = None
self,
node_data: dict[str, Any],
branch: str,
related_nodes: list[InfrahubNode],
timeout: int | None = None,
) -> None:
"""Processes the Relationships of a InfrahubNode and add Related Nodes to a list.

Expand Down Expand Up @@ -1369,7 +1372,8 @@ def generate_query_data_node(
continue

peer_data: dict[str, Any] = {}
if rel_schema and prefetch_relationships:
should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include)
if rel_schema and should_fetch_relationship:
peer_schema = self._client.schema.get(kind=rel_schema.peer, branch=self._branch)
peer_node = InfrahubNodeSync(client=self._client, schema=peer_schema, branch=self._branch)
peer_data = peer_node.generate_query_data_node(include=include, exclude=exclude, property=property)
Expand Down
34 changes: 34 additions & 0 deletions tests/integration/test_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,40 @@ async def test_node_create_with_relationships_using_related_node(
assert node_after.owner.peer.id == person_joe.id
assert node_after.owner.peer.typename == "TestingPerson"

async def test_node_filters_include(
self,
default_branch: str,
client: InfrahubClient,
initial_schema: None,
manufacturer_mercedes,
person_joe,
tag_red,
) -> None:
car = await client.create(
kind=TESTING_CAR,
name="Tiguan2",
color="Black",
manufacturer=manufacturer_mercedes,
owner=person_joe,
tags=[tag_red],
)
await car.save(allow_upsert=True)
assert car.id is not None

    # Clear store, as when we call `owner.peer`, we actually rely on the peer having been stored in the store.
client.store._branches = {}
node_after = await client.get(kind=TESTING_CAR, id=car.id)

with pytest.raises(NodeNotFoundError, match=f"Unable to find the node '{person_joe.id}' in the store"):
_ = node_after.owner.peer

assert len(node_after.tags.peers) == 0

# Test both one and many relationships
node_after = await client.get(kind=TESTING_CAR, id=car.id, include=["tags", "owner"])
assert [tag.id for tag in node_after.tags.peers] == [tag_red.id]
assert node_after.owner.peer.id == person_joe.id, f"{person_joe.id=}"

async def test_node_update_with_original_data(
self,
default_branch: str,
Expand Down
17 changes: 9 additions & 8 deletions tests/unit/ctl/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def test_version_command() -> None:


def test_info_command_success(mock_query_infrahub_version, mock_query_infrahub_user) -> None:
result = runner.invoke(app, ["info"])
result = runner.invoke(app, ["info"], env={"INFRAHUB_API_TOKEN": "foo"})
assert result.exit_code == 0
for expected in ["Connection Status", "Python Version", "SDK Version", "Infrahub Version"]:
assert expected in result.stdout, f"'{expected}' not found in info command output"
Expand All @@ -46,15 +46,16 @@ def test_info_command_failure() -> None:


def test_info_detail_command_success(mock_query_infrahub_version, mock_query_infrahub_user) -> None:
result = runner.invoke(app, ["info", "--detail"], env={"INFRAHUB_API_TOKEN": "foo"})
assert result.exit_code == 0
for expected in ["Connection Status", "Version Information", "Client Info", "Infrahub Info", "Groups:"]:
assert expected in result.stdout, f"'{expected}' not found in detailed info command output"


def test_anonymous_info_detail_command_success(mock_query_infrahub_version) -> None:
result = runner.invoke(app, ["info", "--detail"])
assert result.exit_code == 0
for expected in [
"Connection Status",
"Version Information",
"Client Info",
"Infrahub Info",
"Groups:",
]:
for expected in ["Connection Status", "Version Information", "Client Info", "Infrahub Info", "anonymous"]:
assert expected in result.stdout, f"'{expected}' not found in detailed info command output"


Expand Down
5 changes: 5 additions & 0 deletions tests/unit/sdk/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,11 @@ async def client() -> InfrahubClient:
return InfrahubClient(config=Config(address="http://mock", insert_tracker=True, pagination_size=3))


@pytest.fixture
async def client_sync() -> InfrahubClientSync:
return InfrahubClientSync(config=Config(address="http://mock", insert_tracker=True, pagination_size=3))


@pytest.fixture
async def clients() -> BothClients:
both = BothClients(
Expand Down
39 changes: 35 additions & 4 deletions tests/unit/sdk/test_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,23 @@
]


async def set_builtin_tag_schema_cache(client) -> None:
    # Set tag schema in cache to avoid needing to request the server.
builtin_tag_schema = {
"version": "1.0",
"nodes": [
{
"name": "Tag",
"namespace": "Builtin",
"default_filter": "name__value",
"display_label": "name__value",
"branch": "aware",
}
],
}
client.schema.set_cache(builtin_tag_schema)


async def test_method_sanity() -> None:
"""Validate that there is at least one public method and that both clients look the same."""
assert async_node_methods
Expand Down Expand Up @@ -1055,12 +1072,19 @@ async def test_query_data_generic_fragment(clients, mock_schema_query_02, client


@pytest.mark.parametrize("client_type", client_types)
async def test_query_data_include_property(client, location_schema: NodeSchemaAPI, client_type) -> None:
async def test_query_data_include_property(
client,
client_sync,
location_schema: NodeSchemaAPI,
client_type,
) -> None:
if client_type == "standard":
await set_builtin_tag_schema_cache(client)
node = InfrahubNode(client=client, schema=location_schema)
data = await node.generate_query_data(include=["tags"], property=True)
else:
node = InfrahubNodeSync(client=client, schema=location_schema)
await set_builtin_tag_schema_cache(client_sync)
node = InfrahubNodeSync(client=client_sync, schema=location_schema)
data = node.generate_query_data(include=["tags"], property=True)

assert data == {
Expand Down Expand Up @@ -1178,12 +1202,19 @@ async def test_query_data_include_property(client, location_schema: NodeSchemaAP


@pytest.mark.parametrize("client_type", client_types)
async def test_query_data_include(client, location_schema: NodeSchemaAPI, client_type) -> None:
async def test_query_data_include(
client,
client_sync,
location_schema: NodeSchemaAPI,
client_type,
) -> None:
if client_type == "standard":
await set_builtin_tag_schema_cache(client)
node = InfrahubNode(client=client, schema=location_schema)
data = await node.generate_query_data(include=["tags"])
else:
node = InfrahubNodeSync(client=client, schema=location_schema)
await set_builtin_tag_schema_cache(client_sync)
node = InfrahubNodeSync(client=client_sync, schema=location_schema)
data = node.generate_query_data(include=["tags"])

assert data == {
Expand Down