diff --git a/CHANGELOG.md b/CHANGELOG.md index dc5cd08b..9a565484 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,28 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang +## [1.13.5](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.5) - 2025-07-23 + +### Fixed + +- Respect ordering when loading files from a directory + +## [1.13.4](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.4) - 2025-07-22 + +### Fixed + +- Fix processing of relationship during node retrieval using the Sync Client, when prefetching related_nodes. ([#461](https://github.com/opsmill/infrahub-sdk-python/issues/461)) +- Fix schema loading to ignore non-YAML files in folders. ([#462](https://github.com/opsmill/infrahub-sdk-python/issues/462)) +- Fix ignored node variable in filters(). ([#469](https://github.com/opsmill/infrahub-sdk-python/issues/469)) +- Fix use of parallel with filters for Infrahub Client Sync. +- Avoid sending empty list to Infrahub if no valid schemas are found. + +## [1.13.3](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.3) - 2025-06-30 + +### Fixed + +- Update InfrahubNode creation to include __typename, display_label, and kind from a RelatedNode ([#455](https://github.com/opsmill/infrahub-sdk-python/issues/455)) + ## [1.13.2](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.2) - 2025-06-27 ### Fixed diff --git a/changelog/+add_numberpool_support_protocols.added.md b/changelog/+add_numberpool_support_protocols.added.md new file mode 100644 index 00000000..aef27a24 --- /dev/null +++ b/changelog/+add_numberpool_support_protocols.added.md @@ -0,0 +1 @@ +Add support for NumberPool attributes in generated protocols diff --git a/changelog/+batch.fixed.md b/changelog/+batch.fixed.md new file mode 100644 index 00000000..635d6b62 --- /dev/null +++ b/changelog/+batch.fixed.md @@ -0,0 +1 @@ +Create a new batch while fetching relationships instead of reusing the same one. 
\ No newline at end of file diff --git a/changelog/+branch-in-count.fixed.md b/changelog/+branch-in-count.fixed.md new file mode 100644 index 00000000..b4227386 --- /dev/null +++ b/changelog/+branch-in-count.fixed.md @@ -0,0 +1 @@ +Update internal calls to `count` to include the branch parameter so that the query is performed on the correct branch \ No newline at end of file diff --git a/changelog/466.added.md b/changelog/466.added.md new file mode 100644 index 00000000..49d639ef --- /dev/null +++ b/changelog/466.added.md @@ -0,0 +1 @@ +Added `infrahubctl repository init` command to allow the initialization of an Infrahub repository using [infrahub-template](https://github.com/opsmill/infrahub-template). \ No newline at end of file diff --git a/changelog/6882.fixed.md b/changelog/6882.fixed.md new file mode 100644 index 00000000..c0c8cebc --- /dev/null +++ b/changelog/6882.fixed.md @@ -0,0 +1 @@ +Fix value lookup using a flat notation like `foo__bar__value` with relationships of cardinality one \ No newline at end of file diff --git a/docs/docs/infrahubctl/infrahubctl-repository.mdx b/docs/docs/infrahubctl/infrahubctl-repository.mdx index 94375125..eed5af5a 100644 --- a/docs/docs/infrahubctl/infrahubctl-repository.mdx +++ b/docs/docs/infrahubctl/infrahubctl-repository.mdx @@ -19,6 +19,7 @@ $ infrahubctl repository [OPTIONS] COMMAND [ARGS]... **Commands**: * `add`: Add a new repository. +* `init`: Initialize a new Infrahub repository. * `list` ## `infrahubctl repository add` @@ -47,6 +48,29 @@ $ infrahubctl repository add [OPTIONS] NAME LOCATION * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. +## `infrahubctl repository init` + +Initialize a new Infrahub repository. + +**Usage**: + +```console +$ infrahubctl repository init [OPTIONS] DIRECTORY +``` + +**Arguments**: + +* `DIRECTORY`: Directory path for the new project. 
[required] + +**Options**: + +* `--template TEXT`: Template to use for the new repository. Can be a local path or a git repository URL. [default: https://github.com/opsmill/infrahub-template.git] +* `--data PATH`: Path to YAML file containing answers to CLI prompt. +* `--vcs-ref TEXT`: VCS reference to use for the template. Defaults to HEAD. [default: HEAD] +* `--trust / --no-trust`: Trust the template repository. If set, the template will be cloned without verification. [default: no-trust] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--help`: Show this message and exit. + ## `infrahubctl repository list` **Usage**: diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index dc1f539f..16c1c73a 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -784,7 +784,6 @@ async def filters( if at: at = Timestamp(at) - node = InfrahubNode(client=self, schema=schema, branch=branch) filters = kwargs pagination_size = self.pagination_size @@ -825,12 +824,12 @@ async def process_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: nodes = [] related_nodes = [] batch_process = await self.create_batch() - count = await self.count(kind=schema.kind, partial_match=partial_match, **filters) + count = await self.count(kind=schema.kind, branch=branch, partial_match=partial_match, **filters) total_pages = (count + pagination_size - 1) // pagination_size for page_number in range(1, total_pages + 1): page_offset = (page_number - 1) * pagination_size - batch_process.add(task=process_page, node=node, page_offset=page_offset, page_number=page_number) + batch_process.add(task=process_page, page_offset=page_offset, page_number=page_number) async for _, response in batch_process.execute(): nodes.extend(response[1]["nodes"]) @@ -847,7 +846,7 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: while has_remaining_items: page_offset = (page_number - 1) * pagination_size - response, process_result = 
await process_page(page_offset, page_number) + response, process_result = await process_page(page_offset=page_offset, page_number=page_number) nodes.extend(process_result["nodes"]) related_nodes.extend(process_result["related_nodes"]) @@ -1946,9 +1945,9 @@ def filters( """ branch = branch or self.default_branch schema = self.schema.get(kind=kind, branch=branch) - node = InfrahubNodeSync(client=self, schema=schema, branch=branch) if at: at = Timestamp(at) + filters = kwargs pagination_size = self.pagination_size @@ -1990,12 +1989,12 @@ def process_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]: related_nodes = [] batch_process = self.create_batch() - count = self.count(kind=schema.kind, partial_match=partial_match, **filters) + count = self.count(kind=schema.kind, branch=branch, partial_match=partial_match, **filters) total_pages = (count + pagination_size - 1) // pagination_size for page_number in range(1, total_pages + 1): page_offset = (page_number - 1) * pagination_size - batch_process.add(task=process_page, node=node, page_offset=page_offset, page_number=page_number) + batch_process.add(task=process_page, page_offset=page_offset, page_number=page_number) for _, response in batch_process.execute(): nodes.extend(response[1]["nodes"]) @@ -2012,7 +2011,7 @@ def process_non_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]] while has_remaining_items: page_offset = (page_number - 1) * pagination_size - response, process_result = process_page(page_offset, page_number) + response, process_result = process_page(page_offset=page_offset, page_number=page_number) nodes.extend(process_result["nodes"]) related_nodes.extend(process_result["related_nodes"]) diff --git a/infrahub_sdk/ctl/repository.py b/infrahub_sdk/ctl/repository.py index 98e394bf..d23f8484 100644 --- a/infrahub_sdk/ctl/repository.py +++ b/infrahub_sdk/ctl/repository.py @@ -1,10 +1,12 @@ from __future__ import annotations +import asyncio from pathlib import Path from typing 
import Optional import typer import yaml +from copier import run_copy from pydantic import ValidationError from rich.console import Console from rich.table import Table @@ -165,3 +167,52 @@ async def list( ) console.print(table) + + +@app.command() +async def init( + directory: Path = typer.Argument(help="Directory path for the new project."), + template: str = typer.Option( + default="https://github.com/opsmill/infrahub-template.git", + help="Template to use for the new repository. Can be a local path or a git repository URL.", + ), + data: Optional[Path] = typer.Option(default=None, help="Path to YAML file containing answers to CLI prompt."), + vcs_ref: Optional[str] = typer.Option( + default="HEAD", + help="VCS reference to use for the template. Defaults to HEAD.", + ), + trust: Optional[bool] = typer.Option( + default=False, + help="Trust the template repository. If set, the template will be cloned without verification.", + ), + _: str = CONFIG_PARAM, +) -> None: + """Initialize a new Infrahub repository.""" + + config_data = None + if data: + try: + with Path.open(data, encoding="utf-8") as file: + config_data = yaml.safe_load(file) + typer.echo(f"Loaded config: {config_data}") + except Exception as exc: + typer.echo(f"Error loading YAML file: {exc}", err=True) + raise typer.Exit(code=1) + + # Allow template to be a local path or a URL + template_source = template or "" + if template and Path(template).exists(): + template_source = str(Path(template).resolve()) + + try: + await asyncio.to_thread( + run_copy, + template_source, + str(directory), + data=config_data, + vcs_ref=vcs_ref, + unsafe=trust, + ) + except Exception as e: + typer.echo(f"Error running copier: {e}", err=True) + raise typer.Exit(code=1) diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 6e9ff994..8c18b395 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -36,7 +36,7 @@ def validate_schema_content_and_exit(client: InfrahubClient, schemas: 
list[Schem has_error: bool = False for schema_file in schemas: try: - client.schema.validate(data=schema_file.content) + client.schema.validate(data=schema_file.payload) except ValidationError as exc: console.print(f"[red]Schema not valid, found '{len(exc.errors())}' error(s) in {schema_file.location}") has_error = True @@ -48,7 +48,7 @@ def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[Schem raise typer.Exit(1) -def display_schema_load_errors(response: dict[str, Any], schemas_data: list[dict]) -> None: +def display_schema_load_errors(response: dict[str, Any], schemas_data: list[SchemaFile]) -> None: console.print("[red]Unable to load the schema:") if "detail" not in response: handle_non_detail_errors(response=response) @@ -87,7 +87,7 @@ def handle_non_detail_errors(response: dict[str, Any]) -> None: if "error" in response: console.print(f" {response.get('error')}") elif "errors" in response: - for error in response.get("errors"): + for error in response["errors"]: console.print(f" {error.get('message')}") else: console.print(f" '{response}'") @@ -97,9 +97,9 @@ def valid_error_path(loc_path: list[Any]) -> bool: return len(loc_path) >= 6 and loc_path[0] == "body" and loc_path[1] == "schemas" -def get_node(schemas_data: list[dict], schema_index: int, node_index: int) -> dict | None: - if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].content["nodes"]): - return schemas_data[schema_index].content["nodes"][node_index] +def get_node(schemas_data: list[SchemaFile], schema_index: int, node_index: int) -> dict | None: + if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].payload["nodes"]): + return schemas_data[schema_index].payload["nodes"][node_index] return None @@ -122,7 +122,7 @@ async def load( validate_schema_content_and_exit(client=client, schemas=schemas_data) start_time = time.time() - response = await client.schema.load(schemas=[item.content for item in schemas_data], 
branch=branch) + response = await client.schema.load(schemas=[item.payload for item in schemas_data], branch=branch) loading_time = time.time() - start_time if response.errors: @@ -170,10 +170,10 @@ async def check( client = initialize_client() validate_schema_content_and_exit(client=client, schemas=schemas_data) - success, response = await client.schema.check(schemas=[item.content for item in schemas_data], branch=branch) + success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch) if not success: - display_schema_load_errors(response=response, schemas_data=schemas_data) + display_schema_load_errors(response=response or {}, schemas_data=schemas_data) else: for schema_file in schemas_data: console.print(f"[green] schema '{schema_file.location}' is Valid!") diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py index 63d7dfb8..66f86865 100644 --- a/infrahub_sdk/ctl/utils.py +++ b/infrahub_sdk/ctl/utils.py @@ -187,6 +187,9 @@ def load_yamlfile_from_disk_and_exit( has_error = False try: data_files = file_type.load_from_disk(paths=paths) + if not data_files: + console.print("[red]No valid files found to load.") + raise typer.Exit(1) except FileNotValidError as exc: console.print(f"[red]{exc.message}") raise typer.Exit(1) from exc diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index 7a095586..242281b5 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -8,7 +8,7 @@ from ..exceptions import FeatureNotSupportedError, NodeNotFoundError, ResourceNotDefinedError, SchemaNotFoundError from ..graphql import Mutation, Query from ..schema import GenericSchemaAPI, RelationshipCardinality, RelationshipKind -from ..utils import compare_lists, generate_short_id, get_flat_value +from ..utils import compare_lists, generate_short_id from .attribute import Attribute from .constants import ( ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE, @@ -402,10 +402,10 @@ def 
generate_query_data_init( if order: data["@filters"]["order"] = order - if offset: + if offset is not None: data["@filters"]["offset"] = offset - if limit: + if limit is not None: data["@filters"]["limit"] = limit if include and exclude: @@ -418,14 +418,6 @@ def generate_query_data_init( return data - def extract(self, params: dict[str, str]) -> dict[str, Any]: - """Extract some datapoints defined in a flat notation.""" - result: dict[str, Any] = {} - for key, value in params.items(): - result[key] = get_flat_value(self, key=value) - - return result - def __hash__(self) -> int: return hash(self.id) @@ -507,11 +499,17 @@ def _init_relationships(self, data: dict | RelatedNode | None = None) -> None: if rel_schema.cardinality == "one": if isinstance(rel_data, RelatedNode): - peer_id_data: dict[str, Any] = {} - if rel_data.id: - peer_id_data["id"] = rel_data.id - if rel_data.hfid: - peer_id_data["hfid"] = rel_data.hfid + peer_id_data: dict[str, Any] = { + key: value + for key, value in ( + ("id", rel_data.id), + ("hfid", rel_data.hfid), + ("__typename", rel_data.typename), + ("kind", rel_data.kind), + ("display_label", rel_data.display_label), + ) + if value is not None + } if peer_id_data: rel_data = peer_id_data else: @@ -1030,6 +1028,46 @@ def _get_relationship_one(self, name: str) -> RelatedNode: raise ResourceNotDefinedError(message=f"The node doesn't have a cardinality=one relationship for {name}") + async def get_flat_value(self, key: str, separator: str = "__") -> Any: + """Query recursively a value defined in a flat notation (string), on a hierarchy of objects + + Examples: + name__value + module.object.value + """ + if separator not in key: + return getattr(self, key) + + first, remaining = key.split(separator, maxsplit=1) + + if first in self._schema.attribute_names: + attr = getattr(self, first) + for part in remaining.split(separator): + attr = getattr(attr, part) + return attr + + try: + rel = self._schema.get_relationship(name=first) + except ValueError 
as exc: + raise ValueError(f"No attribute or relationship named '{first}' for '{self._schema.kind}'") from exc + + if rel.cardinality != RelationshipCardinality.ONE: + raise ValueError( + f"Can only look up flat value for relationships of cardinality {RelationshipCardinality.ONE.value}" + ) + + related_node: RelatedNode = getattr(self, first) + await related_node.fetch() + return await related_node.peer.get_flat_value(key=remaining, separator=separator) + + async def extract(self, params: dict[str, str]) -> dict[str, Any]: + """Extract some datapoints defined in a flat notation.""" + result: dict[str, Any] = {} + for key, value in params.items(): + result[key] = await self.get_flat_value(key=value) + + return result + def __dir__(self) -> Iterable[str]: base = list(super().__dir__()) return sorted( @@ -1090,11 +1128,17 @@ def _init_relationships(self, data: dict | None = None) -> None: if rel_schema.cardinality == "one": if isinstance(rel_data, RelatedNodeSync): - peer_id_data: dict[str, Any] = {} - if rel_data.id: - peer_id_data["id"] = rel_data.id - if rel_data.hfid: - peer_id_data["hfid"] = rel_data.hfid + peer_id_data: dict[str, Any] = { + key: value + for key, value in ( + ("id", rel_data.id), + ("hfid", rel_data.hfid), + ("__typename", rel_data.typename), + ("kind", rel_data.kind), + ("display_label", rel_data.display_label), + ) + if value is not None + } if peer_id_data: rel_data = peer_id_data else: @@ -1481,15 +1525,15 @@ def _process_relationships( for rel_name in self._relationships: rel = getattr(self, rel_name) if rel and isinstance(rel, RelatedNodeSync): - relation = node_data["node"].get(rel_name) - if relation.get("node", None): + relation = node_data["node"].get(rel_name, None) + if relation and relation.get("node", None): related_node = InfrahubNodeSync.from_graphql( client=self._client, branch=branch, data=relation, timeout=timeout ) related_nodes.append(related_node) elif rel and isinstance(rel, RelationshipManagerSync): - peers = 
node_data["node"].get(rel_name) - if peers: + peers = node_data["node"].get(rel_name, None) + if peers and peers["edges"]: for peer in peers["edges"]: related_node = InfrahubNodeSync.from_graphql( client=self._client, branch=branch, data=peer, timeout=timeout @@ -1610,6 +1654,46 @@ def _get_relationship_one(self, name: str) -> RelatedNode | RelatedNodeSync: raise ResourceNotDefinedError(message=f"The node doesn't have a cardinality=one relationship for {name}") + def get_flat_value(self, key: str, separator: str = "__") -> Any: + """Query recursively a value defined in a flat notation (string), on a hierarchy of objects + + Examples: + name__value + module.object.value + """ + if separator not in key: + return getattr(self, key) + + first, remaining = key.split(separator, maxsplit=1) + + if first in self._schema.attribute_names: + attr = getattr(self, first) + for part in remaining.split(separator): + attr = getattr(attr, part) + return attr + + try: + rel = self._schema.get_relationship(name=first) + except ValueError as exc: + raise ValueError(f"No attribute or relationship named '{first}' for '{self._schema.kind}'") from exc + + if rel.cardinality != RelationshipCardinality.ONE: + raise ValueError( + f"Can only look up flat value for relationships of cardinality {RelationshipCardinality.ONE.value}" + ) + + related_node: RelatedNodeSync = getattr(self, first) + related_node.fetch() + return related_node.peer.get_flat_value(key=remaining, separator=separator) + + def extract(self, params: dict[str, str]) -> dict[str, Any]: + """Extract some datapoints defined in a flat notation.""" + result: dict[str, Any] = {} + for key, value in params.items(): + result[key] = self.get_flat_value(key=value) + + return result + def __dir__(self) -> Iterable[str]: base = list(super().__dir__()) return sorted( diff --git a/infrahub_sdk/node/related_node.py b/infrahub_sdk/node/related_node.py index 60d46ca9..bf6cb532 100644 --- a/infrahub_sdk/node/related_node.py +++ 
b/infrahub_sdk/node/related_node.py @@ -39,6 +39,7 @@ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, self._hfid: list[str] | None = None self._display_label: str | None = None self._typename: str | None = None + self._kind: str | None = None if isinstance(data, (CoreNodeBase)): self._peer = data @@ -118,6 +119,12 @@ def typename(self) -> str | None: return self._peer.typename return self._typename + @property + def kind(self) -> str | None: + if self._peer: + return self._peer.get_kind() + return self._kind + def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]: data: dict[str, Any] = {} diff --git a/infrahub_sdk/node/relationship.py b/infrahub_sdk/node/relationship.py index c527dc50..8473a1cb 100644 --- a/infrahub_sdk/node/relationship.py +++ b/infrahub_sdk/node/relationship.py @@ -1,11 +1,15 @@ from __future__ import annotations +from collections import defaultdict from collections.abc import Iterable from typing import TYPE_CHECKING, Any +from ..batch import InfrahubBatch from ..exceptions import ( + Error, UninitializedError, ) +from ..types import Order from .constants import PROPERTIES_FLAG, PROPERTIES_OBJECT from .related_node import RelatedNode, RelatedNodeSync @@ -156,8 +160,26 @@ async def fetch(self) -> None: self.peers = rm.peers self.initialized = True + ids_per_kind_map = defaultdict(list) for peer in self.peers: - await peer.fetch() # type: ignore[misc] + if not peer.id or not peer.typename: + raise Error("Unable to fetch the peer, id and/or typename are not defined") + ids_per_kind_map[peer.typename].append(peer.id) + + batch = InfrahubBatch(max_concurrent_execution=self.client.max_concurrent_execution) + for kind, ids in ids_per_kind_map.items(): + batch.add( + task=self.client.filters, + kind=kind, + ids=ids, + populate_store=True, + branch=self.branch, + parallel=True, + order=Order(disable=True), + ) + + async for _ in batch.execute(): + pass def add(self, data: str | RelatedNode | 
dict) -> None: """Add a new peer to this relationship.""" @@ -261,8 +283,27 @@ def fetch(self) -> None: self.peers = rm.peers self.initialized = True + ids_per_kind_map = defaultdict(list) for peer in self.peers: - peer.fetch() + if not peer.id or not peer.typename: + raise Error("Unable to fetch the peer, id and/or typename are not defined") + ids_per_kind_map[peer.typename].append(peer.id) + + # Unlike Async, no need to create a new batch from scratch because we are not using a semaphore + batch = self.client.create_batch() + for kind, ids in ids_per_kind_map.items(): + batch.add( + task=self.client.filters, + kind=kind, + ids=ids, + populate_store=True, + branch=self.branch, + parallel=True, + order=Order(disable=True), + ) + + for _ in batch.execute(): + pass def add(self, data: str | RelatedNodeSync | dict) -> None: """Add a new peer to this relationship.""" diff --git a/infrahub_sdk/protocols_base.py b/infrahub_sdk/protocols_base.py index a47d95ef..a3daa1fb 100644 --- a/infrahub_sdk/protocols_base.py +++ b/infrahub_sdk/protocols_base.py @@ -204,8 +204,6 @@ def is_resource_pool(self) -> bool: ... def get_raw_graphql_data(self) -> dict | None: ... - def extract(self, params: dict[str, str]) -> dict[str, Any]: ... 
- @runtime_checkable class CoreNode(CoreNodeBase, Protocol): diff --git a/infrahub_sdk/protocols_generator/constants.py b/infrahub_sdk/protocols_generator/constants.py index d0bdb076..63c3dbb6 100644 --- a/infrahub_sdk/protocols_generator/constants.py +++ b/infrahub_sdk/protocols_generator/constants.py @@ -22,6 +22,7 @@ "List": "ListAttribute", "JSON": "JSONAttribute", "Any": "AnyAttribute", + "NumberPool": "Integer", } # The order of the classes in the list determines the order of the classes in the generated code diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py index b505dbeb..9232b32d 100644 --- a/infrahub_sdk/utils.py +++ b/infrahub_sdk/utils.py @@ -190,23 +190,6 @@ def str_to_bool(value: str) -> bool: raise ValueError(f"{value} can not be converted into a boolean") from exc -def get_flat_value(obj: Any, key: str, separator: str = "__") -> Any: - """Query recursively an value defined in a flat notation (string), on a hierarchy of objects - - Examples: - name__value - module.object.value - """ - if separator not in key: - return getattr(obj, key) - - first_part, remaining_part = key.split(separator, maxsplit=1) - sub_obj = getattr(obj, first_part) - if not sub_obj: - return None - return get_flat_value(obj=sub_obj, key=remaining_part, separator=separator) - - def generate_request_filename(request: httpx.Request) -> str: """Return a filename for a request sent to the Infrahub API diff --git a/infrahub_sdk/yaml.py b/infrahub_sdk/yaml.py index 8e3f2f73..6b764081 100644 --- a/infrahub_sdk/yaml.py +++ b/infrahub_sdk/yaml.py @@ -120,16 +120,22 @@ def load_file_from_disk(cls, path: Path) -> list[Self]: @classmethod def load_from_disk(cls, paths: list[Path]) -> list[Self]: yaml_files: list[Self] = [] + file_extensions = {".yaml", ".yml", ".json"} # FIXME: .json is not a YAML file, should be removed + for file_path in paths: - if file_path.is_file() and file_path.suffix in [".yaml", ".yml", ".json"]: - yaml_files.extend(cls.load_file_from_disk(path=file_path)) 
+ if not file_path.exists(): + # Check if the provided path exists, relevant for the first call coming from the user + raise FileNotValidError(name=str(file_path), message=f"{file_path} does not exist!") + if file_path.is_file(): + if file_path.suffix in file_extensions: + yaml_files.extend(cls.load_file_from_disk(path=file_path)) + # else: silently skip files with unrelevant extensions (e.g. .md, .py...) elif file_path.is_dir(): + # Introduce recursion to handle sub-folders sub_paths = [Path(sub_file_path) for sub_file_path in file_path.glob("*")] - sub_files = cls.load_from_disk(paths=sub_paths) - sorted_sub_files = sorted(sub_files, key=lambda x: x.location) - yaml_files.extend(sorted_sub_files) - else: - raise FileNotValidError(name=str(file_path), message=f"{file_path} does not exist!") + sub_paths = sorted(sub_paths, key=lambda p: p.name) + yaml_files.extend(cls.load_from_disk(paths=sub_paths)) + # else: skip non-file, non-dir (e.g., symlink...) return yaml_files diff --git a/poetry.lock b/poetry.lock index a1b4c065..694d1253 100644 --- a/poetry.lock +++ b/poetry.lock @@ -220,7 +220,37 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "(extra == \"ctl\" or extra == \"all\") and platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} +markers = {main = "extra == \"ctl\" or extra == \"all\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} + +[[package]] +name = "copier" +version = "9.8.0" +description = "A library for rendering project templates." 
+optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"ctl\" or extra == \"all\"" +files = [ + {file = "copier-9.8.0-py3-none-any.whl", hash = "sha256:ca0bee47f198b66cec926c4f1a3aa77f11ee0102624369c10e42ca9058c0a891"}, + {file = "copier-9.8.0.tar.gz", hash = "sha256:343ac1eb65e678aa355690d7f19869ef07cabf837f511a87ed452443c085ec58"}, +] + +[package.dependencies] +colorama = ">=0.4.6" +dunamai = ">=1.7.0" +eval-type-backport = {version = ">=0.1.3,<0.3.0", markers = "python_version < \"3.10\""} +funcy = ">=1.17" +jinja2 = ">=3.1.5" +jinja2-ansible-filters = ">=1.3.1" +packaging = ">=23.0" +pathspec = ">=0.9.0" +platformdirs = ">=4.3.6" +plumbum = ">=1.6.9" +pydantic = ">=2.4.2" +pygments = ">=2.7.1" +pyyaml = ">=5.3.1" +questionary = ">=1.8.1" +typing-extensions = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} [[package]] name = "coverage" @@ -446,6 +476,22 @@ https = ["urllib3 (>=1.24.1)"] paramiko = ["paramiko"] pgp = ["gpg"] +[[package]] +name = "dunamai" +version = "1.25.0" +description = "Dynamic version generation" +optional = true +python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"ctl\" or extra == \"all\"" +files = [ + {file = "dunamai-1.25.0-py3-none-any.whl", hash = "sha256:7f9dc687dd3256e613b6cc978d9daabfd2bb5deb8adc541fc135ee423ffa98ab"}, + {file = "dunamai-1.25.0.tar.gz", hash = "sha256:a7f8360ea286d3dbaf0b6a1473f9253280ac93d619836ad4514facb70c0719d1"}, +] + +[package.dependencies] +packaging = ">=20.9" + [[package]] name = "eval-type-backport" version = "0.2.2" @@ -525,6 +571,19 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. 
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] +[[package]] +name = "funcy" +version = "2.0" +description = "A fancy and practical functional tools" +optional = true +python-versions = "*" +groups = ["main"] +markers = "extra == \"ctl\" or extra == \"all\"" +files = [ + {file = "funcy-2.0-py2.py3-none-any.whl", hash = "sha256:53df23c8bb1651b12f095df764bfb057935d49537a56de211b098f4c79614bb0"}, + {file = "funcy-2.0.tar.gz", hash = "sha256:3963315d59d41c6f30c04bc910e10ab50a3ac4a225868bfa96feed133df075cb"}, +] + [[package]] name = "graphql-core" version = "3.2.4" @@ -796,6 +855,26 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jinja2-ansible-filters" +version = "1.3.2" +description = "A port of Ansible's jinja2 filters without requiring ansible core." +optional = true +python-versions = "*" +groups = ["main"] +markers = "extra == \"ctl\" or extra == \"all\"" +files = [ + {file = "jinja2-ansible-filters-1.3.2.tar.gz", hash = "sha256:07c10cf44d7073f4f01102ca12d9a2dc31b41d47e4c61ed92ef6a6d2669b356b"}, + {file = "jinja2_ansible_filters-1.3.2-py3-none-any.whl", hash = "sha256:e1082f5564917649c76fed239117820610516ec10f87735d0338688800a55b34"}, +] + +[package.dependencies] +Jinja2 = "*" +PyYAML = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -1089,11 +1168,12 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +markers = {main = "extra == \"ctl\" or extra == \"all\""} [[package]] name = "pexpect" @@ -1113,20 +1193,21 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.3.3" +version = "4.3.8" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=3.8" -groups = ["dev"] +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"}, - {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"}, + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, ] +markers = {main = "extra == \"ctl\" or extra == \"all\""} [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" @@ -1144,6 +1225,28 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", 
"pytest-benchmark"] +[[package]] +name = "plumbum" +version = "1.9.0" +description = "Plumbum: shell combinators library" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"ctl\" or extra == \"all\"" +files = [ + {file = "plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5"}, + {file = "plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219"}, +] + +[package.dependencies] +pywin32 = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +dev = ["coverage[toml]", "paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] +docs = ["sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"] +ssh = ["paramiko"] +test = ["coverage[toml]", "paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] + [[package]] name = "pprintpp" version = "0.4.0" @@ -1181,11 +1284,12 @@ version = "3.0.47" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] +markers = {main = "extra == \"ctl\" or extra == \"all\""} [package.dependencies] wcwidth = "*" @@ -1608,8 +1712,7 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" -groups = ["dev"] -markers = "python_version >= \"3.10\" and sys_platform == \"win32\"" +groups = ["main", "dev"] files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = 
"pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -1630,6 +1733,7 @@ files = [ {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] +markers = {main = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\" and (extra == \"ctl\" or extra == \"all\")", dev = "python_version >= \"3.10\" and sys_platform == \"win32\""} [[package]] name = "pyyaml" @@ -1695,6 +1799,22 @@ files = [ ] markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""} +[[package]] +name = "questionary" +version = "2.1.0" +description = "Python library to build pretty command line user prompts ⭐️" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"ctl\" or extra == \"all\"" +files = [ + {file = "questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec"}, + {file = "questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587"}, +] + +[package.dependencies] +prompt_toolkit = ">=2.0,<4.0" + [[package]] name = "requests" version = "2.32.3" @@ -2180,11 +2300,12 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +markers = {main = "extra == \"ctl\" or extra == \"all\""} [[package]] name = "whenever" @@ -2387,11 +2508,11 @@ test = ["big-O", "importlib-resources ; 
python_version < \"3.9\"", "jaraco.funct type = ["pytest-mypy"] [extras] -all = ["Jinja2", "click", "numpy", "numpy", "pyarrow", "pytest", "pyyaml", "rich", "toml", "typer"] -ctl = ["Jinja2", "click", "numpy", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer"] +all = ["Jinja2", "click", "copier", "numpy", "numpy", "pyarrow", "pytest", "pyyaml", "rich", "toml", "typer"] +ctl = ["Jinja2", "click", "copier", "numpy", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer"] tests = ["Jinja2", "pytest", "pyyaml", "rich"] [metadata] lock-version = "2.1" python-versions = "^3.9, <3.14" -content-hash = "978a8ed3c6f4f4e46d39b8c33affb767a91275ee2bee532a48a7abc3d224deb8" +content-hash = "110653882a7abfb7d9597d6ffa77d455ab93eca8aec08f6b505a1dd12baaec6c" diff --git a/pyproject.toml b/pyproject.toml index f6b4f7e3..a55539b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "infrahub-sdk" -version = "1.13.2" +version = "1.13.5" description = "Python Client to interact with Infrahub" authors = ["OpsMill "] readme = "README.md" @@ -45,6 +45,7 @@ dulwich = "^0.21.4" whenever = ">=0.7.2,<0.8.0" netutils = "^1.0.0" click = { version = "8.1.*", optional = true } +copier = { version = "^9.8.0", optional = true } [tool.poetry.group.dev.dependencies] pytest = "*" @@ -69,7 +70,7 @@ infrahub-testcontainers = { version = "^1.2.5", python = ">=3.10" } astroid = "~3.1" [tool.poetry.extras] -ctl = ["Jinja2", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer", "click"] +ctl = ["Jinja2", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer", "click", "copier"] tests = ["Jinja2", "pytest", "pyyaml", "rich"] all = [ "Jinja2", @@ -81,6 +82,7 @@ all = [ "toml", "typer", "click", + "copier", ] [tool.poetry.scripts] @@ -113,10 +115,6 @@ disallow_untyped_defs = true module = "infrahub_sdk.ctl.check" disable_error_code = ["call-overload"] -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.schema" -disable_error_code = ["arg-type", "attr-defined", "misc", 
"union-attr"] - [[tool.mypy.overrides]] module = "infrahub_sdk.utils" disable_error_code = ["arg-type", "attr-defined", "return-value", "union-attr"] diff --git a/tasks.py b/tasks.py index 62ad2299..e10ca2c1 100644 --- a/tasks.py +++ b/tasks.py @@ -201,7 +201,7 @@ def lint_vale(context: Context) -> None: return print(" - Check documentation style with vale") - exec_cmd = r'vale $(find ./docs -type f \( -name "*.mdx" -o -name "*.md" \))' + exec_cmd = r'vale $(find ./docs -type f \( -name "*.mdx" -o -name "*.md" \) -not -path "*/node_modules/*")' with context.cd(MAIN_DIRECTORY_PATH): context.run(exec_cmd) diff --git a/tests/fixtures/nested_spec_objects/0_folder/4_subfolder/to_be_ignored.py b/tests/fixtures/nested_spec_objects/0_folder/4_subfolder/to_be_ignored.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/nested_spec_objects/2_folder/to_be_ignored.md b/tests/fixtures/nested_spec_objects/2_folder/to_be_ignored.md new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/test_node.py b/tests/integration/test_node.py index c398d00a..9cc2ed75 100644 --- a/tests/integration/test_node.py +++ b/tests/integration/test_node.py @@ -83,6 +83,7 @@ async def test_node_create_with_relationships_using_related_node( assert node_after.name.value == node.name.value assert node_after.manufacturer.peer.id == manufacturer_mercedes.id assert node_after.owner.peer.id == person_joe.id + assert node_after.owner.peer.typename == "TestingPerson" async def test_node_update_with_original_data( self, diff --git a/tests/unit/ctl/test_repository_app.py b/tests/unit/ctl/test_repository_app.py index ba0a7721..f8636824 100644 --- a/tests/unit/ctl/test_repository_app.py +++ b/tests/unit/ctl/test_repository_app.py @@ -1,8 +1,11 @@ """Integration tests for infrahubctl commands.""" +import tempfile +from pathlib import Path from unittest import mock import pytest +import yaml from typer.testing import CliRunner from infrahub_sdk.client import 
InfrahubClient @@ -322,3 +325,73 @@ def test_repo_list(self, mock_repositories_list) -> None: result = runner.invoke(app, ["repository", "list"]) assert result.exit_code == 0 assert strip_color(result.stdout) == read_fixture("output.txt", "integration/test_infrahubctl/repository_list") + + def test_repo_init(self) -> None: + """Test the repository init command.""" + with ( + tempfile.TemporaryDirectory() as temp_dst, + tempfile.NamedTemporaryFile(mode="w", suffix=".yml", delete=False, encoding="utf-8") as temp_yaml, + ): + dst = Path(temp_dst) + yaml_path = Path(temp_yaml.name) + commit = "v0.0.1" + + answers = { + "generators": True, + "menus": True, + "project_name": "test", + "queries": True, + "scripts": True, + "tests": True, + "transforms": True, + "package_mode": False, + "_commit": commit, + } + + yaml.safe_dump(answers, temp_yaml) + temp_yaml.close() + runner.invoke(app, ["repository", "init", str(dst), "--data", str(yaml_path), "--vcs-ref", commit]) + coppied_answers = yaml.safe_load((dst / ".copier-answers.yml").read_text()) + coppied_answers.pop("_src_path") + + assert coppied_answers == answers + assert (dst / "generators").is_dir() + assert (dst / "queries").is_dir() + assert (dst / "scripts").is_dir() + assert (dst / "pyproject.toml").is_file() + + def test_repo_init_local_template(self) -> None: + """Test the repository init command with a local template.""" + with ( + tempfile.TemporaryDirectory() as temp_src, + tempfile.TemporaryDirectory() as temp_dst, + tempfile.NamedTemporaryFile(mode="w", suffix=".yml", delete=False, encoding="utf-8") as temp_yaml, + ): + src = Path(temp_src) + dst = Path(temp_dst) + + # Create a simple copier template + (src / "copier.yml").write_text("project_name:\n type: str") + template_dir = src / "{{project_name}}" + template_dir.mkdir() + (template_dir / "file.txt.jinja").write_text("Hello {{ project_name }}") + + # Create answers file + yaml_path = Path(temp_yaml.name) + answers = {"project_name": "local-test"} + 
yaml.safe_dump(answers, temp_yaml) + temp_yaml.close() + + # Run the command + result = runner.invoke( + app, ["repository", "init", str(dst), "--template", str(src), "--data", str(yaml_path)] + ) + + assert result.exit_code == 0, result.stdout + + # Check the output + project_dir = dst / "local-test" + assert project_dir.is_dir() + output_file = project_dir / "file.txt" + assert output_file.is_file() + assert output_file.read_text() == "Hello local-test" diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index 2938d4a5..4df1bd6a 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -196,7 +196,13 @@ async def test_init_node_data_user_with_relationships(client, location_schema: N @pytest.mark.parametrize("client_type", client_types) -@pytest.mark.parametrize("rel_data", [{"id": "pppppppp"}, {"hfid": ["pppp", "pppp"]}]) +@pytest.mark.parametrize( + "rel_data", + [ + {"id": "pppppppp", "__typename": "BuiltinTag"}, + {"hfid": ["pppp", "pppp"], "display_label": "mmmm", "kind": "BuiltinTag"}, + ], +) async def test_init_node_data_user_with_relationships_using_related_node( client, location_schema: NodeSchemaAPI, client_type, rel_data ): @@ -231,6 +237,9 @@ async def test_init_node_data_user_with_relationships_using_related_node( assert isinstance(node.primary_tag, RelatedNodeBase) assert node.primary_tag.id == rel_data.get("id") assert node.primary_tag.hfid == rel_data.get("hfid") + assert node.primary_tag.typename == rel_data.get("__typename") + assert node.primary_tag.kind == rel_data.get("kind") + assert node.primary_tag.display_label == rel_data.get("display_label") keys = dir(node) assert "name" in keys @@ -1874,6 +1883,19 @@ async def test_node_fetch_relationship( ) response2 = { + "data": { + "BuiltinTag": { + "count": 1, + } + } + } + + httpx_mock.add_response( + method="POST", + json=response2, + ) + + response3 = { "data": { "BuiltinTag": { "count": 1, @@ -1886,7 +1908,7 @@ async def test_node_fetch_relationship( 
httpx_mock.add_response( method="POST", - json=response2, + json=response3, match_headers={"X-Infrahub-Tracker": "query-builtintag-page1"}, ) @@ -1938,23 +1960,52 @@ async def test_node_IPNetwork_deserialization(client, ipnetwork_schema, client_t @pytest.mark.parametrize("client_type", client_types) -async def test_node_extract(client, location_schema, location_data01, client_type): +async def test_get_flat_value( + httpx_mock: HTTPXMock, mock_schema_query_01, clients, location_schema, location_data01, client_type +): + httpx_mock.add_response( + method="POST", + json={"data": {"BuiltinTag": {"count": 1, "edges": [location_data01["node"]["primary_tag"]]}}}, + match_headers={"X-Infrahub-Tracker": "query-builtintag-page1"}, + is_reusable=True, + ) + if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) + tag = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data01) + assert await tag.get_flat_value(key="name__value") == "DFW" + assert await tag.get_flat_value(key="primary_tag__display_label") == "red" + assert await tag.get_flat_value(key="primary_tag.display_label", separator=".") == "red" + + with pytest.raises(ValueError, match="Can only look up flat value for relationships of cardinality one"): + assert await tag.get_flat_value(key="tags__display_label") == "red" else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) + tag = InfrahubNodeSync(client=clients.sync, schema=location_schema, data=location_data01) + assert tag.get_flat_value(key="name__value") == "DFW" + assert tag.get_flat_value(key="primary_tag__display_label") == "red" + assert tag.get_flat_value(key="primary_tag.display_label", separator=".") == "red" - params = { - "identifier": "id", - "name": "name__value", - "description": "description__value", - } + with pytest.raises(ValueError, match="Can only look up flat value for relationships of cardinality one"): + assert 
tag.get_flat_value(key="tags__display_label") == "red" - assert node.extract(params=params) == { - "description": None, - "identifier": "llllllll-llll-llll-llll-llllllllllll", - "name": "DFW", - } + +@pytest.mark.parametrize("client_type", client_types) +async def test_node_extract(clients, location_schema, location_data01, client_type): + params = {"identifier": "id", "name": "name__value", "description": "description__value"} + if client_type == "standard": + node = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data01) + assert await node.extract(params=params) == { + "description": None, + "identifier": "llllllll-llll-llll-llll-llllllllllll", + "name": "DFW", + } + + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema, data=location_data01) + assert node.extract(params=params) == { + "description": None, + "identifier": "llllllll-llll-llll-llll-llllllllllll", + "name": "DFW", + } @pytest.mark.parametrize("client_type", client_types) diff --git a/tests/unit/sdk/test_utils.py b/tests/unit/sdk/test_utils.py index 88c25644..7a220cd2 100644 --- a/tests/unit/sdk/test_utils.py +++ b/tests/unit/sdk/test_utils.py @@ -6,7 +6,6 @@ from graphql import parse from whenever import Instant -from infrahub_sdk.node import InfrahubNode from infrahub_sdk.utils import ( base16decode, base16encode, @@ -19,7 +18,6 @@ duplicates, extract_fields, generate_short_id, - get_flat_value, is_valid_url, is_valid_uuid, str_to_bool, @@ -143,13 +141,6 @@ def test_base16(): assert base16decode(base16encode(1412823931503067241)) == 1412823931503067241 -def test_get_flat_value(client, tag_schema, tag_green_data): - tag = InfrahubNode(client=client, schema=tag_schema, data=tag_green_data) - assert get_flat_value(obj=tag, key="name__value") == "green" - assert get_flat_value(obj=tag, key="name__source__display_label") == "CRM" - assert get_flat_value(obj=tag, key="name.source.display_label", separator=".") == "CRM" - - def test_dict_hash(): assert 
dict_hash({"a": 1, "b": 2}) == "608de49a4600dbb5b173492759792e4a" assert dict_hash({"b": 2, "a": 1}) == "608de49a4600dbb5b173492759792e4a" diff --git a/tests/unit/sdk/test_yaml.py b/tests/unit/sdk/test_yaml.py index 3532265c..af5a8591 100644 --- a/tests/unit/sdk/test_yaml.py +++ b/tests/unit/sdk/test_yaml.py @@ -1,5 +1,9 @@ from pathlib import Path +import pytest + +from infrahub_sdk.exceptions import FileNotValidError +from infrahub_sdk.utils import get_fixtures_dir from infrahub_sdk.yaml import YamlFile here = Path(__file__).parent.resolve() @@ -42,3 +46,9 @@ def test_read_multiple_files_invalid() -> None: assert yaml_files[0].valid is True assert yaml_files[1].document_position == 2 assert yaml_files[1].valid is False + + +def test_load_non_existing_folder(): + with pytest.raises(FileNotValidError) as exc: + YamlFile.load_from_disk(paths=[get_fixtures_dir() / "does_not_exist"]) + assert "does not exist" in str(exc.value)