diff --git a/.vale/styles/spelling-exceptions.txt b/.vale/styles/spelling-exceptions.txt
index d018438b..904fc251 100644
--- a/.vale/styles/spelling-exceptions.txt
+++ b/.vale/styles/spelling-exceptions.txt
@@ -85,6 +85,7 @@ namespace
namespaces
Nautobot
Netbox
+Netutils
Newsfragment
Nornir
npm
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b8e9608e..d4f8a7d5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,23 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang
+## [1.10.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.10.0) - 2025-04-01
+
+### Deprecated
+
+- The `get_by_hfid` method on the `Store` object has been deprecated; use `get(key=[hfid])` instead (see the sketch below).
+- Using a Store without specifying a default branch is now deprecated and will be removed in a future version.
+
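A minimal migration sketch (the client setup, kind, and HFID values below are hypothetical):

```python
from infrahub_sdk import InfrahubClient

client = InfrahubClient()

# Before (deprecated):
# node = client.store.get_by_hfid(key="InfraDevice__atl1-edge1")

# After: get() also accepts an HFID, either as a list of parts (optionally combined
# with a kind) or as a kind-prefixed string, and lookups can be scoped to a branch.
node = client.store.get(key=["atl1-edge1"], kind="InfraDevice", branch="main", raise_when_missing=False)
```
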
+### Added
+
+- All nodes generated by the SDK will now be assigned an `internal_id` (`_internal_id`). This ID has no significance outside of the SDK.
+- Jinja2 templating has been refactored to allow filters to be used within Infrahub. Builtin filters as well as those from Netutils are available.
+- The object store has been refactored to support more use cases in the future, and it now properly supports branches.
+
+### Fixed
+
+- Fix node processing when using fragments with `prefetch_relationships`. ([#331](https://github.com/opsmill/infrahub-sdk-python/issues/331))
+
## [1.9.2](https://github.com/opsmill/infrahub-sdk-python/tree/v1.9.2) - 2025-03-26
### Changed
diff --git a/changelog/331.fixed.md b/changelog/331.fixed.md
deleted file mode 100644
index e67d6182..00000000
--- a/changelog/331.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix node processing, when using fragment with `prefetch_relationships`.
\ No newline at end of file
diff --git a/docs/_templates/sdk_template_reference.j2 b/docs/_templates/sdk_template_reference.j2
new file mode 100644
index 00000000..dcd59d78
--- /dev/null
+++ b/docs/_templates/sdk_template_reference.j2
@@ -0,0 +1,27 @@
+---
+title: Python SDK Templating
+---
+Filters can be used when defining [computed attributes](https://docs.infrahub.app/guides/computed-attributes) or [Jinja2 Transforms](https://docs.infrahub.app/guides/jinja2-transform) within Infrahub.
+
+## Builtin Jinja2 filters
+
+The following filters are [shipped with Jinja2](https://jinja.palletsprojects.com/en/stable/templates/#list-of-builtin-filters) and enabled within Infrahub. The Trusted column indicates whether a filter is allowed in Infrahub's computed attributes when the server is configured in strict mode.
+
+
+| Name | Trusted |
+|----------|----------|
+{% for filter in builtin %}
+| {{ filter.name }} | {% if filter.trusted %}✅{% else %}❌{% endif %} |
+{% endfor %}
+
+
+## Netutils filters
+
+The following Jinja2 filters from Netutils are included within Infrahub.
+
+| Name | Trusted |
+|----------|----------|
+{% for filter in netutils %}
+| {{ filter.name }} | {% if filter.trusted %}✅{% else %}❌{% endif %} |
+{% endfor %}
+
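This template appears to drive the generated reference page below; for reference, a minimal sketch of how it could be rendered with the filter definitions shipped in `infrahub_sdk/template/filters.py` (the generation step and output path are assumptions, not necessarily how the repository builds its docs):

```python
import asyncio
from pathlib import Path

from infrahub_sdk.template import Jinja2Template
from infrahub_sdk.template.filters import BUILTIN_FILTERS, NETUTILS_FILTERS

# The template iterates over two lists of FilterDefinition objects,
# exposed to it as the `builtin` and `netutils` variables.
template = Jinja2Template(
    template=Path("docs/_templates/sdk_template_reference.j2"),
    template_directory=Path("."),
)
content = asyncio.run(
    template.render(variables={"builtin": BUILTIN_FILTERS, "netutils": NETUTILS_FILTERS})
)
Path("docs/docs/python-sdk/reference/templating.mdx").write_text(content)
```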
diff --git a/docs/docs/python-sdk/introduction.mdx b/docs/docs/python-sdk/introduction.mdx
index 9a9b3bb9..699eef8c 100644
--- a/docs/docs/python-sdk/introduction.mdx
+++ b/docs/docs/python-sdk/introduction.mdx
@@ -4,7 +4,7 @@ title: Python SDK
The Infrahub Python SDK greatly simplifies how you can interact with Infrahub programmatically.
-## Blog Posts
+## Blog posts
- [Querying Data in Infrahub via the Python SDK](https://www.opsmill.com/querying-data-in-infrahub-via-the-python-sdk/)
diff --git a/docs/docs/python-sdk/reference/templating.mdx b/docs/docs/python-sdk/reference/templating.mdx
new file mode 100644
index 00000000..62f1b8aa
--- /dev/null
+++ b/docs/docs/python-sdk/reference/templating.mdx
@@ -0,0 +1,153 @@
+---
+title: Python SDK Templating
+---
+Filters can be used when defining [computed attributes](https://docs.infrahub.app/guides/computed-attributes) or [Jinja2 Transforms](https://docs.infrahub.app/guides/jinja2-transform) within Infrahub.
+
+## Builtin Jinja2 filters
+
+The following filters are [shipped with Jinja2](https://jinja.palletsprojects.com/en/stable/templates/#list-of-builtin-filters) and enabled within Infrahub. The Trusted column indicates whether a filter is allowed in Infrahub's computed attributes when the server is configured in strict mode.
+
+
+| Name | Trusted |
+|----------|----------|
+| abs | ✅ |
+| attr | ❌ |
+| batch | ❌ |
+| capitalize | ✅ |
+| center | ✅ |
+| count | ✅ |
+| d | ✅ |
+| default | ✅ |
+| dictsort | ❌ |
+| e | ✅ |
+| escape | ✅ |
+| filesizeformat | ✅ |
+| first | ✅ |
+| float | ✅ |
+| forceescape | ✅ |
+| format | ✅ |
+| groupby | ❌ |
+| indent | ✅ |
+| int | ✅ |
+| items | ❌ |
+| join | ✅ |
+| last | ✅ |
+| length | ✅ |
+| list | ✅ |
+| lower | ✅ |
+| map | ❌ |
+| max | ✅ |
+| min | ✅ |
+| pprint | ❌ |
+| random | ❌ |
+| reject | ❌ |
+| rejectattr | ❌ |
+| replace | ✅ |
+| reverse | ✅ |
+| round | ✅ |
+| safe | ❌ |
+| select | ❌ |
+| selectattr | ❌ |
+| slice | ✅ |
+| sort | ❌ |
+| string | ✅ |
+| striptags | ✅ |
+| sum | ✅ |
+| title | ✅ |
+| tojson | ❌ |
+| trim | ✅ |
+| truncate | ✅ |
+| unique | ❌ |
+| upper | ✅ |
+| urlencode | ✅ |
+| urlize | ❌ |
+| wordcount | ✅ |
+| wordwrap | ✅ |
+| xmlattr | ❌ |
+
+
+## Netutils filters
+
+The following Jinja2 filters from Netutils are included within Infrahub.
+
+| Name | Trusted |
+|----------|----------|
+| abbreviated_interface_name | ✅ |
+| abbreviated_interface_name_list | ✅ |
+| asn_to_int | ✅ |
+| bits_to_name | ✅ |
+| bytes_to_name | ✅ |
+| canonical_interface_name | ✅ |
+| canonical_interface_name_list | ✅ |
+| cidr_to_netmask | ✅ |
+| cidr_to_netmaskv6 | ✅ |
+| clean_config | ✅ |
+| compare_version_loose | ✅ |
+| compare_version_strict | ✅ |
+| config_compliance | ✅ |
+| config_section_not_parsed | ✅ |
+| delimiter_change | ✅ |
+| diff_network_config | ✅ |
+| feature_compliance | ✅ |
+| find_unordered_cfg_lines | ✅ |
+| fqdn_to_ip | ❌ |
+| get_all_host | ❌ |
+| get_broadcast_address | ✅ |
+| get_first_usable | ✅ |
+| get_ips_sorted | ✅ |
+| get_nist_urls | ✅ |
+| get_nist_vendor_platform_urls | ✅ |
+| get_oui | ✅ |
+| get_peer_ip | ✅ |
+| get_range_ips | ✅ |
+| get_upgrade_path | ✅ |
+| get_usable_range | ✅ |
+| hash_data | ✅ |
+| int_to_asdot | ✅ |
+| interface_range_compress | ✅ |
+| interface_range_expansion | ✅ |
+| ip_addition | ✅ |
+| ip_subtract | ✅ |
+| ip_to_bin | ✅ |
+| ip_to_hex | ✅ |
+| ipaddress_address | ✅ |
+| ipaddress_interface | ✅ |
+| ipaddress_network | ✅ |
+| is_classful | ✅ |
+| is_fqdn_resolvable | ❌ |
+| is_ip | ✅ |
+| is_ip_range | ✅ |
+| is_ip_within | ✅ |
+| is_netmask | ✅ |
+| is_network | ✅ |
+| is_reversible_wildcardmask | ✅ |
+| is_valid_mac | ✅ |
+| longest_prefix_match | ✅ |
+| mac_normalize | ✅ |
+| mac_to_format | ✅ |
+| mac_to_int | ✅ |
+| mac_type | ✅ |
+| name_to_bits | ✅ |
+| name_to_bytes | ✅ |
+| name_to_name | ✅ |
+| netmask_to_cidr | ✅ |
+| netmask_to_wildcardmask | ✅ |
+| normalise_delimiter_caret_c | ✅ |
+| paloalto_panos_brace_to_set | ✅ |
+| paloalto_panos_clean_newlines | ✅ |
+| regex_findall | ❌ |
+| regex_match | ❌ |
+| regex_search | ❌ |
+| regex_split | ❌ |
+| regex_sub | ❌ |
+| sanitize_config | ✅ |
+| section_config | ✅ |
+| sort_interface_list | ✅ |
+| split_interface | ✅ |
+| uptime_seconds_to_string | ✅ |
+| uptime_string_to_seconds | ✅ |
+| version_metadata | ✅ |
+| vlanconfig_to_list | ✅ |
+| vlanlist_to_config | ✅ |
+| wildcardmask_to_netmask | ✅ |
+
\ No newline at end of file
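Outside of Infrahub itself, the same filter set is exposed through the SDK's `Jinja2Template` helper. A minimal sketch (the interface value is only for illustration):

```python
import asyncio

from infrahub_sdk.template import Jinja2Template

# String-based templates are rendered in a sandboxed environment that only
# exposes the filters listed in the tables above.
template = Jinja2Template(template="{{ interface | canonical_interface_name }}")

# validate(restricted=True) mirrors strict mode: it raises
# JinjaTemplateOperationViolationError if an untrusted filter is used.
template.validate(restricted=True)

print(asyncio.run(template.render(variables={"interface": "Gi0/1"})))
# Expected output: GigabitEthernet0/1
```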
diff --git a/docs/sidebars-python-sdk.ts b/docs/sidebars-python-sdk.ts
index 8da5a81b..7cde4058 100644
--- a/docs/sidebars-python-sdk.ts
+++ b/docs/sidebars-python-sdk.ts
@@ -38,6 +38,7 @@ const sidebars: SidebarsConfig = {
label: 'Reference',
items: [
'reference/config',
+ 'reference/templating',
],
},
],
diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py
index 1835ff00..fffa8164 100644
--- a/infrahub_sdk/client.py
+++ b/infrahub_sdk/client.py
@@ -281,7 +281,7 @@ def _initialize(self) -> None:
self.schema = InfrahubSchema(self)
self.branch = InfrahubBranchManager(self)
self.object_store = ObjectStore(self)
- self.store = NodeStore()
+ self.store = NodeStore(default_branch=self.default_branch)
self.task = InfrahubTaskManager(self)
self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution)
self._request_method: AsyncRequester = self.config.requester or self._default_request_method
@@ -840,11 +840,11 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]:
if populate_store:
for node in nodes:
if node.id:
- self.store.set(key=node.id, node=node)
+ self.store.set(node=node)
related_nodes = list(set(related_nodes))
for node in related_nodes:
if node.id:
- self.store.set(key=node.id, node=node)
+ self.store.set(node=node)
return nodes
def clone(self) -> InfrahubClient:
@@ -1529,7 +1529,7 @@ def _initialize(self) -> None:
self.schema = InfrahubSchemaSync(self)
self.branch = InfrahubBranchManagerSync(self)
self.object_store = ObjectStoreSync(self)
- self.store = NodeStoreSync()
+ self.store = NodeStoreSync(default_branch=self.default_branch)
self.task = InfrahubTaskManagerSync(self)
self._request_method: SyncRequester = self.config.sync_requester or self._default_request_method
self.group_context = InfrahubGroupContextSync(self)
@@ -1997,11 +1997,11 @@ def process_non_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]
if populate_store:
for node in nodes:
if node.id:
- self.store.set(key=node.id, node=node)
+ self.store.set(node=node)
related_nodes = list(set(related_nodes))
for node in related_nodes:
if node.id:
- self.store.set(key=node.id, node=node)
+ self.store.set(node=node)
return nodes
@overload
diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py
index 633ccd2c..0d9a850f 100644
--- a/infrahub_sdk/ctl/cli_commands.py
+++ b/infrahub_sdk/ctl/cli_commands.py
@@ -9,7 +9,6 @@
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Optional
-import jinja2
import typer
import ujson
from rich.console import Console
@@ -18,7 +17,6 @@
from rich.panel import Panel
from rich.pretty import Pretty
from rich.table import Table
-from rich.traceback import Traceback
from .. import __version__ as sdk_version
from ..async_typer import AsyncTyper
@@ -31,7 +29,7 @@
from ..ctl.generator import run as run_generator
from ..ctl.menu import app as menu_app
from ..ctl.object import app as object_app
-from ..ctl.render import list_jinja2_transforms
+from ..ctl.render import list_jinja2_transforms, print_template_errors
from ..ctl.repository import app as repository_app
from ..ctl.repository import get_repository_config
from ..ctl.schema import app as schema_app
@@ -44,8 +42,9 @@
)
from ..ctl.validate import app as validate_app
from ..exceptions import GraphQLError, ModuleImportError
-from ..jinja2 import identify_faulty_jinja_code
from ..schema import MainSchemaTypesAll, SchemaRoot
+from ..template import Jinja2Template
+from ..template.exceptions import JinjaTemplateError
from ..utils import get_branch, write_to_file
from ..yaml import SchemaFile
from .exporter import dump
@@ -168,43 +167,28 @@ async def run(
raise typer.Abort(f"Unable to Load the method {method} in the Python script at {script}")
client = initialize_client(
- branch=branch, timeout=timeout, max_concurrent_execution=concurrent, identifier=module_name
+ branch=branch,
+ timeout=timeout,
+ max_concurrent_execution=concurrent,
+ identifier=module_name,
)
func = getattr(module, method)
await func(client=client, log=log, branch=branch, **variables_dict)
-def render_jinja2_template(template_path: Path, variables: dict[str, str], data: dict[str, Any]) -> str:
- if not template_path.is_file():
- console.print(f"[red]Unable to locate the template at {template_path}")
- raise typer.Exit(1)
-
- templateLoader = jinja2.FileSystemLoader(searchpath=".")
- templateEnv = jinja2.Environment(loader=templateLoader, trim_blocks=True, lstrip_blocks=True)
- template = templateEnv.get_template(str(template_path))
-
+async def render_jinja2_template(template_path: Path, variables: dict[str, Any], data: dict[str, Any]) -> str:
+ variables["data"] = data
+ jinja_template = Jinja2Template(template=Path(template_path), template_directory=Path())
try:
- rendered_tpl = template.render(**variables, data=data) # type: ignore[arg-type]
- except jinja2.TemplateSyntaxError as exc:
- console.print("[red]Syntax Error detected on the template")
- console.print(f"[yellow] {exc}")
- raise typer.Exit(1) from exc
-
- except jinja2.UndefinedError as exc:
- console.print("[red]An error occurred while rendering the jinja template")
- traceback = Traceback(show_locals=False)
- errors = identify_faulty_jinja_code(traceback=traceback)
- for frame, syntax in errors:
- console.print(f"[yellow]{frame.filename} on line {frame.lineno}\n")
- console.print(syntax)
- console.print("")
- console.print(traceback.trace.stacks[0].exc_value)
+ rendered_tpl = await jinja_template.render(variables=variables)
+ except JinjaTemplateError as exc:
+ print_template_errors(error=exc, console=console)
raise typer.Exit(1) from exc
return rendered_tpl
-def _run_transform(
+async def _run_transform(
query_name: str,
variables: dict[str, Any],
transform_func: Callable,
@@ -227,7 +211,11 @@ def _run_transform(
try:
response = execute_graphql_query(
- query=query_name, variables_dict=variables, branch=branch, debug=debug, repository_config=repository_config
+ query=query_name,
+ variables_dict=variables,
+ branch=branch,
+ debug=debug,
+ repository_config=repository_config,
)
# TODO: response is a dict and can't be printed to the console in this way.
@@ -249,7 +237,7 @@ def _run_transform(
raise typer.Abort()
if asyncio.iscoroutinefunction(transform_func):
- output = asyncio.run(transform_func(response))
+ output = await transform_func(response)
else:
output = transform_func(response)
return output
@@ -257,7 +245,7 @@ def _run_transform(
@app.command(name="render")
@catch_exception(console=console)
-def render(
+async def render(
transform_name: str = typer.Argument(default="", help="Name of the Python transformation", show_default=False),
variables: Optional[list[str]] = typer.Argument(
None, help="Variables to pass along with the query. Format key=value key=value."
@@ -289,7 +277,7 @@ def render(
transform_func = functools.partial(render_jinja2_template, transform_config.template_path, variables_dict)
# Query GQL and run the transform
- result = _run_transform(
+ result = await _run_transform(
query_name=transform_config.query,
variables=variables_dict,
transform_func=transform_func,
@@ -410,7 +398,10 @@ def version() -> None:
@app.command(name="info")
@catch_exception(console=console)
-def info(detail: bool = typer.Option(False, help="Display detailed information."), _: str = CONFIG_PARAM) -> None: # noqa: PLR0915
+def info( # noqa: PLR0915
+ detail: bool = typer.Option(False, help="Display detailed information."),
+ _: str = CONFIG_PARAM,
+) -> None:
"""Display the status of the Python SDK."""
info: dict[str, Any] = {
@@ -476,10 +467,14 @@ def info(detail: bool = typer.Option(False, help="Display detailed information."
infrahub_info = Table(show_header=False, box=None)
if info["user_info"]:
infrahub_info.add_row("User:", info["user_info"]["AccountProfile"]["display_label"])
- infrahub_info.add_row("Description:", info["user_info"]["AccountProfile"]["description"]["value"])
+ infrahub_info.add_row(
+ "Description:",
+ info["user_info"]["AccountProfile"]["description"]["value"],
+ )
infrahub_info.add_row("Status:", info["user_info"]["AccountProfile"]["status"]["label"])
infrahub_info.add_row(
- "Number of Groups:", str(info["user_info"]["AccountProfile"]["member_of_groups"]["count"])
+ "Number of Groups:",
+ str(info["user_info"]["AccountProfile"]["member_of_groups"]["count"]),
)
if groups := info["groups"]:
diff --git a/infrahub_sdk/ctl/render.py b/infrahub_sdk/ctl/render.py
index 05122102..cb1c962e 100644
--- a/infrahub_sdk/ctl/render.py
+++ b/infrahub_sdk/ctl/render.py
@@ -1,6 +1,12 @@
from rich.console import Console
from ..schema.repository import InfrahubRepositoryConfig
+from ..template.exceptions import (
+ JinjaTemplateError,
+ JinjaTemplateNotFoundError,
+ JinjaTemplateSyntaxError,
+ JinjaTemplateUndefinedError,
+)
def list_jinja2_transforms(config: InfrahubRepositoryConfig) -> None:
@@ -9,3 +15,36 @@ def list_jinja2_transforms(config: InfrahubRepositoryConfig) -> None:
for transform in config.jinja2_transforms:
console.print(f"{transform.name} ({transform.template_path})")
+
+
+def print_template_errors(error: JinjaTemplateError, console: Console) -> None:
+ if isinstance(error, JinjaTemplateNotFoundError):
+ console.print("[red]An error occurred while rendering the jinja template")
+ console.print("")
+ if error.base_template:
+ console.print(f"Base template: [yellow]{error.base_template}")
+ console.print(f"Missing template: [yellow]{error.filename}")
+ return
+
+ if isinstance(error, JinjaTemplateUndefinedError):
+ console.print("[red]An error occurred while rendering the jinja template")
+ for current_error in error.errors:
+ console.print(f"[yellow]{current_error.frame.filename} on line {current_error.frame.lineno}\n")
+ console.print(current_error.syntax)
+ console.print("")
+ console.print(error.message)
+ return
+
+ if isinstance(error, JinjaTemplateSyntaxError):
+ console.print("[red]A syntax error was encountered within the template")
+ console.print("")
+ if error.filename:
+ console.print(f"Filename: [yellow]{error.filename}")
+ console.print(f"Line number: [yellow]{error.lineno}")
+ console.print()
+ console.print(error.message)
+ return
+
+ console.print("[red]An error occurred while rendering the jinja template")
+ console.print("")
+ console.print(f"[yellow]{error.message}")
diff --git a/infrahub_sdk/exceptions.py b/infrahub_sdk/exceptions.py
index 257ce6b4..f8a5b541 100644
--- a/infrahub_sdk/exceptions.py
+++ b/infrahub_sdk/exceptions.py
@@ -69,12 +69,12 @@ def __init__(self, message: str | None = None):
class NodeNotFoundError(Error):
def __init__(
self,
- node_type: str,
identifier: Mapping[str, list[str]],
message: str = "Unable to find the node in the database.",
branch_name: str | None = None,
+ node_type: str | None = None,
):
- self.node_type = node_type
+ self.node_type = node_type or "unknown"
self.identifier = identifier
self.branch_name = branch_name
@@ -88,6 +88,10 @@ def __str__(self) -> str:
"""
+class NodeInvalidError(NodeNotFoundError):
+ pass
+
+
class ResourceNotDefinedError(Error):
"""Raised when trying to access a resource that hasn't been defined."""
diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py
index e08f6642..24e4bebc 100644
--- a/infrahub_sdk/generator.py
+++ b/infrahub_sdk/generator.py
@@ -137,7 +137,7 @@ async def process_nodes(self, data: dict) -> None:
for node in self._nodes + self._related_nodes:
if node.id:
- self._init_client.store.set(key=node.id, node=node)
+ self._init_client.store.set(node=node)
@abstractmethod
async def generate(self, data: dict) -> None:
diff --git a/infrahub_sdk/node.py b/infrahub_sdk/node.py
index 45ca0bd3..6033fe3b 100644
--- a/infrahub_sdk/node.py
+++ b/infrahub_sdk/node.py
@@ -15,7 +15,7 @@
)
from .graphql import Mutation, Query
from .schema import GenericSchemaAPI, RelationshipCardinality, RelationshipKind
-from .utils import compare_lists, get_flat_value
+from .utils import compare_lists, generate_short_id, get_flat_value
from .uuidt import UUIDT
if TYPE_CHECKING:
@@ -43,6 +43,20 @@
"calling generate is only supported for CoreArtifactDefinition nodes"
)
+HFID_STR_SEPARATOR = "__"
+
+
+def parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]:
+ """Parse a human friendly ID into a kind and an identifier."""
+ if isinstance(hfid, str):
+ hfid_parts = hfid.split(HFID_STR_SEPARATOR)
+ if len(hfid_parts) == 1:
+ return None, hfid_parts
+ return hfid_parts[0], hfid_parts[1:]
+ if isinstance(hfid, list):
+ return None, hfid
+ raise ValueError(f"Invalid human friendly ID: {hfid}")
+
class Attribute:
"""Represents an attribute of a Node, including its schema, value, and properties."""
@@ -340,10 +354,10 @@ def get(self) -> InfrahubNode:
return self._peer # type: ignore[return-value]
if self.id and self.typename:
- return self._client.store.get(key=self.id, kind=self.typename) # type: ignore[return-value]
+ return self._client.store.get(key=self.id, kind=self.typename, branch=self._branch) # type: ignore[return-value]
if self.hfid_str:
- return self._client.store.get_by_hfid(key=self.hfid_str) # type: ignore[return-value]
+ return self._client.store.get(key=self.hfid_str, branch=self._branch) # type: ignore[return-value]
raise ValueError("Node must have at least one identifier (ID or HFID) to query it.")
@@ -387,10 +401,10 @@ def get(self) -> InfrahubNodeSync:
return self._peer # type: ignore[return-value]
if self.id and self.typename:
- return self._client.store.get(key=self.id, kind=self.typename) # type: ignore[return-value]
+ return self._client.store.get(key=self.id, kind=self.typename, branch=self._branch) # type: ignore[return-value]
if self.hfid_str:
- return self._client.store.get_by_hfid(key=self.hfid_str) # type: ignore[return-value]
+ return self._client.store.get(key=self.hfid_str, branch=self._branch) # type: ignore[return-value]
raise ValueError("Node must have at least one identifier (ID or HFID) to query it.")
@@ -678,6 +692,11 @@ def __init__(self, schema: MainSchemaTypesAPI, branch: str, data: dict | None =
self._branch = branch
self._existing: bool = True
+ # Generate a unique ID only to be used inside the SDK
+    # The format of this ID is purposely different from the ID used by the API
+ # This is done to avoid confusion and potential conflicts between the IDs
+ self._internal_id = generate_short_id()
+
self.id = data.get("id", None) if isinstance(data, dict) else None
self.display_label: str | None = data.get("display_label", None) if isinstance(data, dict) else None
self.typename: str | None = data.get("__typename", schema.kind) if isinstance(data, dict) else schema.kind
@@ -694,6 +713,9 @@ def __init__(self, schema: MainSchemaTypesAPI, branch: str, data: dict | None =
self._init_attributes(data)
self._init_relationships(data)
+ def get_branch(self) -> str:
+ return self._branch
+
def get_path_value(self, path: str) -> Any:
path_parts = path.split("__")
return_value = None
@@ -794,6 +816,11 @@ def __repr__(self) -> str:
def get_kind(self) -> str:
return self._schema.kind
+ def get_all_kinds(self) -> list[str]:
+ if hasattr(self._schema, "inherit_from"):
+ return [self._schema.kind] + self._schema.inherit_from
+ return [self._schema.kind]
+
def is_ip_prefix(self) -> bool:
builtin_ipprefix_kind = "BuiltinIPPrefix"
return self.get_kind() == builtin_ipprefix_kind or builtin_ipprefix_kind in self._schema.inherit_from # type: ignore[union-attr]
@@ -1201,7 +1228,7 @@ async def save(
else:
await self._client.group_context.add_related_nodes(ids=[self.id], update_group_context=update_group_context)
- self._client.store.set(key=self.id, node=self)
+ self._client.store.set(node=self)
async def generate_query_data(
self,
@@ -1726,7 +1753,7 @@ def save(
else:
self._client.group_context.add_related_nodes(ids=[self.id], update_group_context=update_group_context)
- self._client.store.set(key=self.id, node=self)
+ self._client.store.set(node=self)
def generate_query_data(
self,
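For reference, a quick sketch of how the new `parse_human_friendly_id` helper splits HFIDs (the kind and values are hypothetical):

```python
from infrahub_sdk.node import parse_human_friendly_id

# A kind-prefixed string is split on "__" into the kind and the HFID parts
print(parse_human_friendly_id("InfraDevice__atl1-edge1"))
# -> ('InfraDevice', ['atl1-edge1'])

# A plain string or a list of parts is returned without a kind
print(parse_human_friendly_id("atl1-edge1"))
# -> (None, ['atl1-edge1'])
print(parse_human_friendly_id(["atl1-edge1", "Ethernet1"]))
# -> (None, ['atl1-edge1', 'Ethernet1'])
```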
diff --git a/infrahub_sdk/protocols_base.py b/infrahub_sdk/protocols_base.py
index c634d37f..2d533ac7 100644
--- a/infrahub_sdk/protocols_base.py
+++ b/infrahub_sdk/protocols_base.py
@@ -144,7 +144,8 @@ class AnyAttributeOptional(Attribute):
@runtime_checkable
class CoreNodeBase(Protocol):
_schema: MainSchemaTypes
- id: str
+ _internal_id: str
+ id: str # NOTE this is incorrect, should be str | None
display_label: str | None
@property
@@ -153,10 +154,16 @@ def hfid(self) -> list[str] | None: ...
@property
def hfid_str(self) -> str | None: ...
+ def get_human_friendly_id(self) -> list[str] | None: ...
+
def get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None: ...
def get_kind(self) -> str: ...
+ def get_all_kinds(self) -> list[str]: ...
+
+ def get_branch(self) -> str: ...
+
def is_ip_prefix(self) -> bool: ...
def is_ip_address(self) -> bool: ...
diff --git a/infrahub_sdk/pytest_plugin/items/jinja2_transform.py b/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
index a5bba094..4ed2e2c5 100644
--- a/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
+++ b/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
@@ -1,51 +1,47 @@
from __future__ import annotations
+import asyncio
import difflib
+from pathlib import Path
from typing import TYPE_CHECKING, Any
import jinja2
import ujson
from httpx import HTTPStatusError
-from rich.console import Console
-from rich.traceback import Traceback
-from ...jinja2 import identify_faulty_jinja_code
-from ..exceptions import Jinja2TransformError, Jinja2TransformUndefinedError, OutputMatchError
+from ...template import Jinja2Template
+from ...template.exceptions import JinjaTemplateError
+from ..exceptions import OutputMatchError
from ..models import InfrahubInputOutputTest, InfrahubTestExpectedResult
from .base import InfrahubItem
if TYPE_CHECKING:
- from pathlib import Path
-
from pytest import ExceptionInfo
class InfrahubJinja2Item(InfrahubItem):
+ def _get_jinja2(self) -> Jinja2Template:
+ return Jinja2Template(
+ template=Path(self.resource_config.template_path), # type: ignore[attr-defined]
+ template_directory=Path(self.session.infrahub_config_path.parent), # type: ignore[attr-defined]
+ )
+
def get_jinja2_environment(self) -> jinja2.Environment:
- loader = jinja2.FileSystemLoader(self.session.infrahub_config_path.parent) # type: ignore[attr-defined]
- return jinja2.Environment(loader=loader, trim_blocks=True, lstrip_blocks=True)
+ jinja2_template = self._get_jinja2()
+ return jinja2_template.get_environment()
def get_jinja2_template(self) -> jinja2.Template:
- return self.get_jinja2_environment().get_template(str(self.resource_config.template_path)) # type: ignore[attr-defined]
+ jinja2_template = self._get_jinja2()
+ return jinja2_template.get_template()
def render_jinja2_template(self, variables: dict[str, Any]) -> str | None:
+ jinja2_template = self._get_jinja2()
+
try:
- return self.get_jinja2_template().render(**variables)
- except jinja2.UndefinedError as exc:
- traceback = Traceback(show_locals=False)
- errors = identify_faulty_jinja_code(traceback=traceback)
- console = Console()
- with console.capture() as capture:
- console.print(f"An error occurred while rendering Jinja2 transform:{self.name!r}\n", soft_wrap=True)
- console.print(f"{exc.message}\n", soft_wrap=True)
- for frame, syntax in errors:
- console.print(f"{frame.filename} on line {frame.lineno}\n", soft_wrap=True)
- console.print(syntax, soft_wrap=True)
- str_output = capture.get()
+ return asyncio.run(jinja2_template.render(variables=variables))
+ except JinjaTemplateError as exc:
if self.test.expect == InfrahubTestExpectedResult.PASS:
- raise Jinja2TransformUndefinedError(
- name=self.name, message=str_output, rtb=traceback, errors=errors
- ) from exc
+ raise exc
return None
def get_result_differences(self, computed: Any) -> str | None:
@@ -99,8 +95,8 @@ def runtest(self) -> None:
raise OutputMatchError(name=self.name, differences=differences)
def repr_failure(self, excinfo: ExceptionInfo, style: str | None = None) -> str:
- if isinstance(excinfo.value, (Jinja2TransformUndefinedError, Jinja2TransformError)):
- return excinfo.value.message
+ if isinstance(excinfo.value, (JinjaTemplateError)):
+ return str(excinfo.value.message)
return super().repr_failure(excinfo, style=style)
diff --git a/infrahub_sdk/store.py b/infrahub_sdk/store.py
index 624722e2..99659fc0 100644
--- a/infrahub_sdk/store.py
+++ b/infrahub_sdk/store.py
@@ -1,16 +1,18 @@
from __future__ import annotations
-from collections import defaultdict
-from typing import TYPE_CHECKING, Any, Literal, overload
+import warnings
+from typing import TYPE_CHECKING, Literal, overload
-from .exceptions import NodeNotFoundError
+from .exceptions import NodeInvalidError, NodeNotFoundError
+from .node import parse_human_friendly_id
if TYPE_CHECKING:
- from .client import SchemaType
+ from .client import SchemaType, SchemaTypeSync
from .node import InfrahubNode, InfrahubNodeSync
+ from .protocols_base import CoreNode, CoreNodeSync
-def get_schema_name(schema: str | type[SchemaType] | None = None) -> str | None:
+def get_schema_name(schema: type[SchemaType | SchemaTypeSync] | str | None = None) -> str | None:
if isinstance(schema, str):
return schema
@@ -20,130 +22,404 @@ def get_schema_name(schema: str | type[SchemaType] | None = None) -> str | None:
return None
-class NodeStoreBase:
- """Internal Store for InfrahubNode objects.
-
- Often while creating a lot of new objects,
- we need to save them in order to reuse them later to associate them with another node for example.
- """
+class NodeStoreBranch:
+ def __init__(self, name: str) -> None:
+ self.branch_name = name
- def __init__(self) -> None:
- self._store: dict[str, dict] = defaultdict(dict)
- self._store_by_hfid: dict[str, Any] = defaultdict(dict)
+ self._objs: dict[str, InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync] = {}
+ self._hfids: dict[str, dict[tuple, str]] = {}
+ self._keys: dict[str, str] = {}
+ self._uuids: dict[str, str] = {}
- def _set(self, node: InfrahubNode | InfrahubNodeSync | SchemaType, key: str | None = None) -> None:
- hfid = node.get_human_friendly_id_as_string(include_kind=True)
+ def count(self) -> int:
+ return len(self._objs)
- if not key and not hfid:
- raise ValueError("Cannot store node without human friendly ID or key.")
+ def set(self, node: InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync, key: str | None = None) -> None:
+ self._objs[node._internal_id] = node
if key:
- node_kind = node._schema.kind
- self._store[node_kind][key] = node
+ self._keys[key] = node._internal_id
- if hfid:
- self._store_by_hfid[hfid] = node
+ if node.id:
+ self._uuids[node.id] = node._internal_id
+
+ if hfid := node.get_human_friendly_id():
+ for kind in node.get_all_kinds():
+ if kind not in self._hfids:
+ self._hfids[kind] = {}
+ self._hfids[kind][tuple(hfid)] = node._internal_id
+
+ def get(
+ self,
+ key: str | list[str],
+ kind: type[SchemaType | SchemaTypeSync] | str | None = None,
+ raise_when_missing: bool = True,
+ ) -> InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync | None:
+ found_invalid = False
- def _get(self, key: str, kind: str | type[SchemaType] | None = None, raise_when_missing: bool = True): # type: ignore[no-untyped-def]
kind_name = get_schema_name(schema=kind)
- if kind_name and kind_name not in self._store and key not in self._store[kind_name]: # type: ignore[attr-defined]
- if not raise_when_missing:
- return None
+
+ if isinstance(key, list):
+ try:
+ return self._get_by_hfid(key, kind=kind_name)
+ except NodeNotFoundError:
+ pass
+
+ elif isinstance(key, str):
+ try:
+ return self._get_by_internal_id(key, kind=kind_name)
+ except NodeInvalidError:
+ found_invalid = True
+ except NodeNotFoundError:
+ pass
+
+ try:
+ return self._get_by_id(key, kind=kind_name)
+ except NodeInvalidError:
+ found_invalid = True
+ except NodeNotFoundError:
+ pass
+
+ try:
+ return self._get_by_key(key, kind=kind_name)
+ except NodeInvalidError:
+ found_invalid = True
+ except NodeNotFoundError:
+ pass
+
+ try:
+ return self._get_by_hfid(key, kind=kind_name)
+ except NodeNotFoundError:
+ pass
+
+ if not raise_when_missing:
+ return None
+
+ if kind and found_invalid:
+ raise NodeInvalidError(
+ identifier={"key": [key] if isinstance(key, str) else key},
+ message=f"Found a node of a different kind instead of {kind} for key {key!r} in the store ({self.branch_name})",
+ )
+
+ raise NodeNotFoundError(
+ identifier={"key": [key] if isinstance(key, str) else key},
+ message=f"Unable to find the node {key!r} in the store ({self.branch_name})",
+ )
+
+ def _get_by_internal_id(
+ self, internal_id: str, kind: str | None = None
+ ) -> InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync:
+ if internal_id not in self._objs:
+ raise NodeNotFoundError(
+ identifier={"internal_id": [internal_id]},
+ message=f"Unable to find the node {internal_id!r} in the store ({self.branch_name})",
+ )
+
+ node = self._objs[internal_id]
+ if kind and kind not in node.get_all_kinds():
+ raise NodeInvalidError(
+ node_type=kind,
+ identifier={"internal_id": [internal_id]},
+ message=f"Found a node of kind {node.get_kind()} instead of {kind} for internal_id {internal_id!r} in the store ({self.branch_name})",
+ )
+
+ return node
+
+ def _get_by_key(
+ self, key: str, kind: str | None = None
+ ) -> InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync:
+ if key not in self._keys:
raise NodeNotFoundError(
- node_type=kind_name,
identifier={"key": [key]},
- message="Unable to find the node in the Store",
+ message=f"Unable to find the node {key!r} in the store ({self.branch_name})",
)
- if kind_name and kind_name in self._store and key in self._store[kind_name]: # type: ignore[attr-defined]
- return self._store[kind_name][key] # type: ignore[attr-defined]
+ node = self._get_by_internal_id(self._keys[key])
- for item in self._store.values(): # type: ignore[attr-defined]
- if key in item:
- return item[key]
+ if kind and node.get_kind() != kind:
+ raise NodeInvalidError(
+ node_type=kind,
+ identifier={"key": [key]},
+ message=f"Found a node of kind {node.get_kind()} instead of {kind} for key {key!r} in the store ({self.branch_name})",
+ )
- if not raise_when_missing:
- return None
- raise NodeNotFoundError(
- node_type="n/a",
- identifier={"key": [key]},
- message=f"Unable to find the node {key!r} in the store",
+ return node
+
+ def _get_by_id(self, id: str, kind: str | None = None) -> InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync:
+ if id not in self._uuids:
+ raise NodeNotFoundError(
+ identifier={"id": [id]},
+ message=f"Unable to find the node {id!r} in the store ({self.branch_name})",
+ )
+
+ node = self._get_by_internal_id(self._uuids[id])
+ if kind and kind not in node.get_all_kinds():
+ raise NodeInvalidError(
+ node_type=kind,
+ identifier={"id": [id]},
+ message=f"Found a node of kind {node.get_kind()} instead of {kind} for id {id!r} in the store ({self.branch_name})",
+ )
+
+ return node
+
+ def _get_by_hfid(
+ self, hfid: str | list[str], kind: str | None = None
+ ) -> InfrahubNode | InfrahubNodeSync | CoreNode | CoreNodeSync:
+ if not kind:
+ node_kind, node_hfid = parse_human_friendly_id(hfid)
+ elif kind and isinstance(hfid, str) and hfid.startswith(kind):
+ node_kind, node_hfid = parse_human_friendly_id(hfid)
+ else:
+ node_kind = kind
+ node_hfid = [hfid] if isinstance(hfid, str) else hfid
+
+ exception_to_raise_if_not_found = NodeNotFoundError(
+ node_type=node_kind,
+ identifier={"hfid": node_hfid},
+ message=f"Unable to find the node {hfid!r} in the store ({self.branch_name})",
)
- def _get_by_hfid(self, key: str, raise_when_missing: bool = True): # type: ignore[no-untyped-def]
- try:
- return self._store_by_hfid[key]
- except KeyError as exc:
- if raise_when_missing:
- raise NodeNotFoundError(
- node_type="n/a",
- identifier={"key": [key]},
- message=f"Unable to find the node {key!r} in the store",
- ) from exc
- return None
+ if node_kind not in self._hfids:
+ raise exception_to_raise_if_not_found
+
+ if tuple(node_hfid) not in self._hfids[node_kind]:
+ raise exception_to_raise_if_not_found
+
+ internal_id = self._hfids[node_kind][tuple(node_hfid)]
+ return self._objs[internal_id]
+
+
+class NodeStoreBase:
+ """Internal Store for InfrahubNode objects.
+
+ Often while creating a lot of new objects,
+ we need to save them in order to reuse them later to associate them with another node for example.
+ """
+
+ def __init__(self, default_branch: str | None = None) -> None:
+ self._branches: dict[str, NodeStoreBranch] = {}
+
+ if default_branch is None:
+ default_branch = "main"
+ warnings.warn(
+ "Using a store without specifying a default branch is deprecated and will be removed in a future version. "
+ "Please explicitly specify a branch name.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ self._default_branch = default_branch
+
+ def _get_branch(self, branch: str | None = None) -> str:
+ return branch or self._default_branch
+
+ def _set(
+ self,
+ node: InfrahubNode | InfrahubNodeSync | SchemaType | SchemaTypeSync,
+ key: str | None = None,
+ branch: str | None = None,
+ ) -> None:
+ branch = self._get_branch(branch or node.get_branch())
+
+ if branch not in self._branches:
+ self._branches[branch] = NodeStoreBranch(name=branch)
+
+ self._branches[branch].set(node=node, key=key)
+
+ def _get( # type: ignore[no-untyped-def]
+ self,
+ key: str | list[str],
+ kind: type[SchemaType | SchemaTypeSync] | str | None = None,
+ raise_when_missing: bool = True,
+ branch: str | None = None,
+ ):
+ branch = self._get_branch(branch)
+
+ if branch not in self._branches:
+ self._branches[branch] = NodeStoreBranch(name=branch)
+
+ return self._branches[branch].get(key=key, kind=kind, raise_when_missing=raise_when_missing)
+
+ def count(self, branch: str | None = None) -> int:
+ branch = self._get_branch(branch)
+
+ if branch not in self._branches:
+ return 0
+
+ return self._branches[branch].count()
class NodeStore(NodeStoreBase):
@overload
- def get(self, key: str, kind: type[SchemaType], raise_when_missing: Literal[True] = True) -> SchemaType: ...
+ def get(
+ self,
+ key: str | list[str],
+ kind: type[SchemaType],
+ raise_when_missing: Literal[True] = True,
+ branch: str | None = ...,
+ ) -> SchemaType: ...
@overload
def get(
- self, key: str, kind: type[SchemaType], raise_when_missing: Literal[False] = False
+ self,
+ key: str | list[str],
+ kind: type[SchemaType],
+ raise_when_missing: Literal[False] = False,
+ branch: str | None = ...,
) -> SchemaType | None: ...
@overload
- def get(self, key: str, kind: type[SchemaType], raise_when_missing: bool = ...) -> SchemaType: ...
+ def get(
+ self,
+ key: str | list[str],
+ kind: type[SchemaType],
+ raise_when_missing: bool = ...,
+ branch: str | None = ...,
+ ) -> SchemaType: ...
@overload
def get(
- self, key: str, kind: str | None = ..., raise_when_missing: Literal[False] = False
- ) -> InfrahubNode | None: ...
+ self,
+ key: str | list[str],
+ kind: str | None = ...,
+ raise_when_missing: Literal[True] = True,
+ branch: str | None = ...,
+ ) -> InfrahubNode: ...
@overload
- def get(self, key: str, kind: str | None = ..., raise_when_missing: Literal[True] = True) -> InfrahubNode: ...
+ def get(
+ self,
+ key: str | list[str],
+ kind: str | None = ...,
+ raise_when_missing: Literal[False] = False,
+ branch: str | None = ...,
+ ) -> InfrahubNode | None: ...
@overload
- def get(self, key: str, kind: str | None = ..., raise_when_missing: bool = ...) -> InfrahubNode: ...
+ def get(
+ self,
+ key: str | list[str],
+ kind: str | None = ...,
+ raise_when_missing: bool = ...,
+ branch: str | None = ...,
+ ) -> InfrahubNode: ...
def get(
- self, key: str, kind: str | type[SchemaType] | None = None, raise_when_missing: bool = True
+ self,
+ key: str | list[str],
+ kind: str | type[SchemaType] | None = None,
+ raise_when_missing: bool = True,
+ branch: str | None = None,
) -> InfrahubNode | SchemaType | None:
- return self._get(key=key, kind=kind, raise_when_missing=raise_when_missing)
+ return self._get(key=key, kind=kind, raise_when_missing=raise_when_missing, branch=branch)
@overload
- def get_by_hfid(self, key: str, raise_when_missing: Literal[True] = True) -> InfrahubNode: ...
+ def get_by_hfid(
+ self, key: str | list[str], raise_when_missing: Literal[True] = True, branch: str | None = ...
+ ) -> InfrahubNode: ...
@overload
- def get_by_hfid(self, key: str, raise_when_missing: Literal[False] = False) -> InfrahubNode | None: ...
+ def get_by_hfid(
+ self, key: str | list[str], raise_when_missing: Literal[False] = False, branch: str | None = ...
+ ) -> InfrahubNode | None: ...
- def get_by_hfid(self, key: str, raise_when_missing: bool = True) -> InfrahubNode | None:
- return self._get_by_hfid(key=key, raise_when_missing=raise_when_missing)
+ def get_by_hfid(
+ self, key: str | list[str], raise_when_missing: bool = True, branch: str | None = None
+ ) -> InfrahubNode | None:
+ warnings.warn(
+ "get_by_hfid() is deprecated and will be removed in a future version. Use get() instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.get(key=key, raise_when_missing=raise_when_missing, branch=branch)
- def set(self, node: Any, key: str | None = None) -> None:
- return self._set(node=node, key=key)
+ def set(self, node: InfrahubNode | SchemaType, key: str | None = None, branch: str | None = None) -> None:
+ return self._set(node=node, key=key, branch=branch)
class NodeStoreSync(NodeStoreBase):
@overload
- def get(self, key: str, kind: str | None = None, raise_when_missing: Literal[True] = True) -> InfrahubNodeSync: ...
+ def get(
+ self,
+ key: str | list[str],
+ kind: type[SchemaTypeSync],
+ raise_when_missing: Literal[True] = True,
+ branch: str | None = ...,
+ ) -> SchemaTypeSync: ...
@overload
def get(
- self, key: str, kind: str | None = None, raise_when_missing: Literal[False] = False
+ self,
+ key: str | list[str],
+ kind: type[SchemaTypeSync],
+ raise_when_missing: Literal[False] = False,
+ branch: str | None = ...,
+ ) -> SchemaTypeSync | None: ...
+
+ @overload
+ def get(
+ self,
+ key: str | list[str],
+ kind: type[SchemaTypeSync],
+ raise_when_missing: bool = ...,
+ branch: str | None = ...,
+ ) -> SchemaTypeSync: ...
+
+ @overload
+ def get(
+ self,
+ key: str | list[str],
+ kind: str | None = ...,
+ raise_when_missing: Literal[True] = True,
+ branch: str | None = ...,
+ ) -> InfrahubNodeSync: ...
+
+ @overload
+ def get(
+ self,
+ key: str | list[str],
+ kind: str | None = ...,
+ raise_when_missing: Literal[False] = False,
+ branch: str | None = ...,
) -> InfrahubNodeSync | None: ...
- def get(self, key: str, kind: str | None = None, raise_when_missing: bool = True) -> InfrahubNodeSync | None:
- return self._get(key=key, kind=kind, raise_when_missing=raise_when_missing)
+ @overload
+ def get(
+ self,
+ key: str | list[str],
+ kind: str | None = ...,
+ raise_when_missing: bool = ...,
+ branch: str | None = ...,
+ ) -> InfrahubNodeSync: ...
+
+ def get(
+ self,
+ key: str | list[str],
+ kind: str | type[SchemaTypeSync] | None = None,
+ raise_when_missing: bool = True,
+ branch: str | None = None,
+ ) -> InfrahubNodeSync | SchemaTypeSync | None:
+ return self._get(key=key, kind=kind, raise_when_missing=raise_when_missing, branch=branch)
@overload
- def get_by_hfid(self, key: str, raise_when_missing: Literal[True] = True) -> InfrahubNodeSync: ...
+ def get_by_hfid(
+ self, key: str | list[str], raise_when_missing: Literal[True] = True, branch: str | None = ...
+ ) -> InfrahubNodeSync: ...
@overload
- def get_by_hfid(self, key: str, raise_when_missing: Literal[False] = False) -> InfrahubNodeSync | None: ...
+ def get_by_hfid(
+ self, key: str | list[str], raise_when_missing: Literal[False] = False, branch: str | None = ...
+ ) -> InfrahubNodeSync | None: ...
- def get_by_hfid(self, key: str, raise_when_missing: bool = True) -> InfrahubNodeSync | None:
- return self._get_by_hfid(key=key, raise_when_missing=raise_when_missing)
+ def get_by_hfid(
+ self, key: str | list[str], raise_when_missing: bool = True, branch: str | None = None
+ ) -> InfrahubNodeSync | None:
+ warnings.warn(
+ "get_by_hfid() is deprecated and will be removed in a future version. Use get() instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.get(key=key, raise_when_missing=raise_when_missing, branch=branch)
- def set(self, node: InfrahubNodeSync, key: str | None = None) -> None:
- return self._set(node=node, key=key)
+ def set(self, node: InfrahubNodeSync | SchemaTypeSync, key: str | None = None, branch: str | None = None) -> None:
+ return self._set(node=node, key=key, branch=branch)
diff --git a/infrahub_sdk/template/__init__.py b/infrahub_sdk/template/__init__.py
new file mode 100644
index 00000000..c43f7ad9
--- /dev/null
+++ b/infrahub_sdk/template/__init__.py
@@ -0,0 +1,209 @@
+from __future__ import annotations
+
+import linecache
+from pathlib import Path
+from typing import Any, Callable, NoReturn
+
+import jinja2
+from jinja2 import meta, nodes
+from jinja2.sandbox import SandboxedEnvironment
+from netutils.utils import jinja2_convenience_function
+from rich.syntax import Syntax
+from rich.traceback import Traceback
+
+from .exceptions import (
+ JinjaTemplateError,
+ JinjaTemplateNotFoundError,
+ JinjaTemplateOperationViolationError,
+ JinjaTemplateSyntaxError,
+ JinjaTemplateUndefinedError,
+)
+from .filters import AVAILABLE_FILTERS
+from .models import UndefinedJinja2Error
+
+netutils_filters = jinja2_convenience_function()
+
+
+class Jinja2Template:
+ def __init__(
+ self,
+ template: str | Path,
+ template_directory: Path | None = None,
+ filters: dict[str, Callable] | None = None,
+ ) -> None:
+ self.is_string_based = isinstance(template, str)
+ self.is_file_based = isinstance(template, Path)
+ self._template = str(template)
+ self._template_directory = template_directory
+ self._environment: jinja2.Environment | None = None
+
+ self._available_filters = [filter_definition.name for filter_definition in AVAILABLE_FILTERS]
+ self._trusted_filters = [
+ filter_definition.name for filter_definition in AVAILABLE_FILTERS if filter_definition.trusted
+ ]
+
+ self._filters = filters or {}
+ for user_filter in self._filters:
+ self._available_filters.append(user_filter)
+ self._trusted_filters.append(user_filter)
+
+ self._template_definition: jinja2.Template | None = None
+
+ def get_environment(self) -> jinja2.Environment:
+ if self._environment:
+ return self._environment
+
+ if self.is_string_based:
+ return self._get_string_based_environment()
+
+ return self._get_file_based_environment()
+
+ def get_template(self) -> jinja2.Template:
+ if self._template_definition:
+ return self._template_definition
+
+ try:
+ if self.is_string_based:
+ template = self._get_string_based_template()
+ else:
+ template = self._get_file_based_template()
+ except jinja2.TemplateSyntaxError as exc:
+ self._raise_template_syntax_error(error=exc)
+ except jinja2.TemplateNotFound as exc:
+ raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name))
+
+ return template
+
+ def get_variables(self) -> list[str]:
+ env = self.get_environment()
+
+ template_source = self._template
+ if self.is_file_based and env.loader:
+ template_source = env.loader.get_source(env, self._template)[0]
+
+ template = env.parse(template_source)
+
+ return sorted(meta.find_undeclared_variables(template))
+
+ def validate(self, restricted: bool = True) -> None:
+ allowed_list = self._available_filters
+ if restricted:
+ allowed_list = self._trusted_filters
+
+ env = self.get_environment()
+ template_source = self._template
+ if self.is_file_based and env.loader:
+ template_source = env.loader.get_source(env, self._template)[0]
+
+ template = env.parse(template_source)
+ for node in template.find_all(nodes.Filter):
+ if node.name not in allowed_list:
+ raise JinjaTemplateOperationViolationError(f"The '{node.name}' filter isn't allowed to be used")
+
+ forbidden_operations = ["Call", "Import", "Include"]
+ if self.is_string_based and any(node.__class__.__name__ in forbidden_operations for node in template.body):
+ raise JinjaTemplateOperationViolationError(
+ f"These operations are forbidden for string based templates: {forbidden_operations}"
+ )
+
+ async def render(self, variables: dict[str, Any]) -> str:
+ template = self.get_template()
+ try:
+ output = await template.render_async(variables)
+ except jinja2.exceptions.TemplateNotFound as exc:
+ raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name), base_template=template.name)
+ except jinja2.TemplateSyntaxError as exc:
+ self._raise_template_syntax_error(error=exc)
+ except jinja2.UndefinedError as exc:
+ traceback = Traceback(show_locals=False)
+ errors = _identify_faulty_jinja_code(traceback=traceback)
+ raise JinjaTemplateUndefinedError(message=exc.message, errors=errors)
+ except Exception as exc:
+ if error_message := getattr(exc, "message", None):
+ message = error_message
+ else:
+ message = str(exc)
+ raise JinjaTemplateError(message=message or "Unknown template error")
+
+ return output
+
+ def _get_string_based_environment(self) -> jinja2.Environment:
+ env = SandboxedEnvironment(enable_async=True, undefined=jinja2.StrictUndefined)
+ self._set_filters(env=env)
+ self._environment = env
+ return self._environment
+
+ def _get_file_based_environment(self) -> jinja2.Environment:
+ template_loader = jinja2.FileSystemLoader(searchpath=str(self._template_directory))
+ env = jinja2.Environment(
+ loader=template_loader,
+ trim_blocks=True,
+ lstrip_blocks=True,
+ enable_async=True,
+ )
+ self._set_filters(env=env)
+ self._environment = env
+ return self._environment
+
+ def _set_filters(self, env: jinja2.Environment) -> None:
+ for default_filter in list(env.filters.keys()):
+ if default_filter not in self._available_filters:
+ del env.filters[default_filter]
+
+ # Add filters from netutils
+ env.filters.update(
+ {name: jinja_filter for name, jinja_filter in netutils_filters.items() if name in self._available_filters}
+ )
+ # Add user supplied filters
+ env.filters.update(self._filters)
+
+ def _get_string_based_template(self) -> jinja2.Template:
+ env = self.get_environment()
+ self._template_definition = env.from_string(self._template)
+ return self._template_definition
+
+ def _get_file_based_template(self) -> jinja2.Template:
+ env = self.get_environment()
+ self._template_definition = env.get_template(self._template)
+ return self._template_definition
+
+ def _raise_template_syntax_error(self, error: jinja2.TemplateSyntaxError) -> NoReturn:
+ filename: str | None = None
+ if error.filename and self._template_directory:
+ filename = error.filename
+ if error.filename.startswith(str(self._template_directory)):
+ filename = error.filename[len(str(self._template_directory)) :]
+
+ raise JinjaTemplateSyntaxError(message=error.message, filename=filename, lineno=error.lineno)
+
+
+def _identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[UndefinedJinja2Error]:
+ """This function identifies the faulty Jinja2 code and beautify it to provide meaningful information to the user.
+
+    We use Rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace.
+ """
+ response = []
+
+ # Extract only the Jinja related exception
+ for frame in [frame for frame in traceback.trace.stacks[0].frames if not frame.filename.endswith(".py")]:
+ code = "".join(linecache.getlines(frame.filename))
+ if frame.filename == "":
+ lexer_name = "text"
+ else:
+ lexer_name = Traceback._guess_lexer(frame.filename, code)
+ syntax = Syntax(
+ code,
+ lexer_name,
+ line_numbers=True,
+ line_range=(
+ frame.lineno - nbr_context_lines,
+ frame.lineno + nbr_context_lines,
+ ),
+ highlight_lines={frame.lineno},
+ code_width=88,
+ theme=traceback.theme,
+ dedent=False,
+ )
+ response.append(UndefinedJinja2Error(frame=frame, syntax=syntax))
+
+ return response
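A short sketch of `Jinja2Template` with a string template, showing `get_variables()` and the new exception types (the template and values are only for illustration):

```python
import asyncio

from infrahub_sdk.template import Jinja2Template
from infrahub_sdk.template.exceptions import JinjaTemplateError, JinjaTemplateUndefinedError

template = Jinja2Template(template="hostname {{ site | lower }}-{{ role | lower }}-01")

print(template.get_variables())  # ['role', 'site']

try:
    # 'role' is intentionally missing: StrictUndefined turns this into an error
    print(asyncio.run(template.render(variables={"site": "ATL1"})))
except JinjaTemplateUndefinedError as exc:
    print(exc.message)  # e.g. 'role' is undefined
except JinjaTemplateError as exc:
    print(exc.message)
```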
diff --git a/infrahub_sdk/template/exceptions.py b/infrahub_sdk/template/exceptions.py
new file mode 100644
index 00000000..44fa1a1f
--- /dev/null
+++ b/infrahub_sdk/template/exceptions.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from infrahub_sdk.exceptions import Error
+
+if TYPE_CHECKING:
+ from .models import UndefinedJinja2Error
+
+
+class JinjaTemplateError(Error):
+ def __init__(self, message: str) -> None:
+ self.message = message
+
+
+class JinjaTemplateNotFoundError(JinjaTemplateError):
+ def __init__(self, message: str | None, filename: str, base_template: str | None = None) -> None:
+ self.message = message or "Template Not Found"
+ self.filename = filename
+ self.base_template = base_template
+
+
+class JinjaTemplateSyntaxError(JinjaTemplateError):
+ def __init__(self, message: str | None, lineno: int, filename: str | None = None) -> None:
+ self.message = message or "Syntax Error"
+ self.filename = filename
+ self.lineno = lineno
+
+
+class JinjaTemplateUndefinedError(JinjaTemplateError):
+ def __init__(self, message: str | None, errors: list[UndefinedJinja2Error]) -> None:
+ self.message = message or "Undefined Error"
+ self.errors = errors
+
+
+class JinjaTemplateOperationViolationError(JinjaTemplateError):
+ def __init__(self, message: str | None = None) -> None:
+ self.message = message or "Forbidden code found in the template"
diff --git a/infrahub_sdk/template/filters.py b/infrahub_sdk/template/filters.py
new file mode 100644
index 00000000..1d082b39
--- /dev/null
+++ b/infrahub_sdk/template/filters.py
@@ -0,0 +1,151 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class FilterDefinition:
+ name: str
+ trusted: bool
+ source: str
+
+
+BUILTIN_FILTERS = [
+ FilterDefinition(name="abs", trusted=True, source="jinja2"),
+ FilterDefinition(name="attr", trusted=False, source="jinja2"),
+ FilterDefinition(name="batch", trusted=False, source="jinja2"),
+ FilterDefinition(name="capitalize", trusted=True, source="jinja2"),
+ FilterDefinition(name="center", trusted=True, source="jinja2"),
+ FilterDefinition(name="count", trusted=True, source="jinja2"),
+ FilterDefinition(name="d", trusted=True, source="jinja2"),
+ FilterDefinition(name="default", trusted=True, source="jinja2"),
+ FilterDefinition(name="dictsort", trusted=False, source="jinja2"),
+ FilterDefinition(name="e", trusted=True, source="jinja2"),
+ FilterDefinition(name="escape", trusted=True, source="jinja2"),
+ FilterDefinition(name="filesizeformat", trusted=True, source="jinja2"),
+ FilterDefinition(name="first", trusted=True, source="jinja2"),
+ FilterDefinition(name="float", trusted=True, source="jinja2"),
+ FilterDefinition(name="forceescape", trusted=True, source="jinja2"),
+ FilterDefinition(name="format", trusted=True, source="jinja2"),
+ FilterDefinition(name="groupby", trusted=False, source="jinja2"),
+ FilterDefinition(name="indent", trusted=True, source="jinja2"),
+ FilterDefinition(name="int", trusted=True, source="jinja2"),
+ FilterDefinition(name="items", trusted=False, source="jinja2"),
+ FilterDefinition(name="join", trusted=True, source="jinja2"),
+ FilterDefinition(name="last", trusted=True, source="jinja2"),
+ FilterDefinition(name="length", trusted=True, source="jinja2"),
+ FilterDefinition(name="list", trusted=True, source="jinja2"),
+ FilterDefinition(name="lower", trusted=True, source="jinja2"),
+ FilterDefinition(name="map", trusted=False, source="jinja2"),
+ FilterDefinition(name="max", trusted=True, source="jinja2"),
+ FilterDefinition(name="min", trusted=True, source="jinja2"),
+ FilterDefinition(name="pprint", trusted=False, source="jinja2"),
+ FilterDefinition(name="random", trusted=False, source="jinja2"),
+ FilterDefinition(name="reject", trusted=False, source="jinja2"),
+ FilterDefinition(name="rejectattr", trusted=False, source="jinja2"),
+ FilterDefinition(name="replace", trusted=True, source="jinja2"),
+ FilterDefinition(name="reverse", trusted=True, source="jinja2"),
+ FilterDefinition(name="round", trusted=True, source="jinja2"),
+ FilterDefinition(name="safe", trusted=False, source="jinja2"),
+ FilterDefinition(name="select", trusted=False, source="jinja2"),
+ FilterDefinition(name="selectattr", trusted=False, source="jinja2"),
+ FilterDefinition(name="slice", trusted=True, source="jinja2"),
+ FilterDefinition(name="sort", trusted=False, source="jinja2"),
+ FilterDefinition(name="string", trusted=True, source="jinja2"),
+ FilterDefinition(name="striptags", trusted=True, source="jinja2"),
+ FilterDefinition(name="sum", trusted=True, source="jinja2"),
+ FilterDefinition(name="title", trusted=True, source="jinja2"),
+ FilterDefinition(name="tojson", trusted=False, source="jinja2"),
+ FilterDefinition(name="trim", trusted=True, source="jinja2"),
+ FilterDefinition(name="truncate", trusted=True, source="jinja2"),
+ FilterDefinition(name="unique", trusted=False, source="jinja2"),
+ FilterDefinition(name="upper", trusted=True, source="jinja2"),
+ FilterDefinition(name="urlencode", trusted=True, source="jinja2"),
+ FilterDefinition(name="urlize", trusted=False, source="jinja2"),
+ FilterDefinition(name="wordcount", trusted=True, source="jinja2"),
+ FilterDefinition(name="wordwrap", trusted=True, source="jinja2"),
+ FilterDefinition(name="xmlattr", trusted=False, source="jinja2"),
+]
+
+
+NETUTILS_FILTERS = [
+ FilterDefinition(name="abbreviated_interface_name", trusted=True, source="netutils"),
+ FilterDefinition(name="abbreviated_interface_name_list", trusted=True, source="netutils"),
+ FilterDefinition(name="asn_to_int", trusted=True, source="netutils"),
+ FilterDefinition(name="bits_to_name", trusted=True, source="netutils"),
+ FilterDefinition(name="bytes_to_name", trusted=True, source="netutils"),
+ FilterDefinition(name="canonical_interface_name", trusted=True, source="netutils"),
+ FilterDefinition(name="canonical_interface_name_list", trusted=True, source="netutils"),
+ FilterDefinition(name="cidr_to_netmask", trusted=True, source="netutils"),
+ FilterDefinition(name="cidr_to_netmaskv6", trusted=True, source="netutils"),
+ FilterDefinition(name="clean_config", trusted=True, source="netutils"),
+ FilterDefinition(name="compare_version_loose", trusted=True, source="netutils"),
+ FilterDefinition(name="compare_version_strict", trusted=True, source="netutils"),
+ FilterDefinition(name="config_compliance", trusted=True, source="netutils"),
+ FilterDefinition(name="config_section_not_parsed", trusted=True, source="netutils"),
+ FilterDefinition(name="delimiter_change", trusted=True, source="netutils"),
+ FilterDefinition(name="diff_network_config", trusted=True, source="netutils"),
+ FilterDefinition(name="feature_compliance", trusted=True, source="netutils"),
+ FilterDefinition(name="find_unordered_cfg_lines", trusted=True, source="netutils"),
+ FilterDefinition(name="fqdn_to_ip", trusted=False, source="netutils"),
+ FilterDefinition(name="get_all_host", trusted=False, source="netutils"),
+ FilterDefinition(name="get_broadcast_address", trusted=True, source="netutils"),
+ FilterDefinition(name="get_first_usable", trusted=True, source="netutils"),
+ FilterDefinition(name="get_ips_sorted", trusted=True, source="netutils"),
+ FilterDefinition(name="get_nist_urls", trusted=True, source="netutils"),
+ FilterDefinition(name="get_nist_vendor_platform_urls", trusted=True, source="netutils"),
+ FilterDefinition(name="get_oui", trusted=True, source="netutils"),
+ FilterDefinition(name="get_peer_ip", trusted=True, source="netutils"),
+ FilterDefinition(name="get_range_ips", trusted=True, source="netutils"),
+ FilterDefinition(name="get_upgrade_path", trusted=True, source="netutils"),
+ FilterDefinition(name="get_usable_range", trusted=True, source="netutils"),
+ FilterDefinition(name="hash_data", trusted=True, source="netutils"),
+ FilterDefinition(name="int_to_asdot", trusted=True, source="netutils"),
+ FilterDefinition(name="interface_range_compress", trusted=True, source="netutils"),
+ FilterDefinition(name="interface_range_expansion", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_addition", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_subtract", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_to_bin", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_to_hex", trusted=True, source="netutils"),
+ FilterDefinition(name="ipaddress_address", trusted=True, source="netutils"),
+ FilterDefinition(name="ipaddress_interface", trusted=True, source="netutils"),
+ FilterDefinition(name="ipaddress_network", trusted=True, source="netutils"),
+ FilterDefinition(name="is_classful", trusted=True, source="netutils"),
+ FilterDefinition(name="is_fqdn_resolvable", trusted=False, source="netutils"),
+ FilterDefinition(name="is_ip", trusted=True, source="netutils"),
+ FilterDefinition(name="is_ip_range", trusted=True, source="netutils"),
+ FilterDefinition(name="is_ip_within", trusted=True, source="netutils"),
+ FilterDefinition(name="is_netmask", trusted=True, source="netutils"),
+ FilterDefinition(name="is_network", trusted=True, source="netutils"),
+ FilterDefinition(name="is_reversible_wildcardmask", trusted=True, source="netutils"),
+ FilterDefinition(name="is_valid_mac", trusted=True, source="netutils"),
+ FilterDefinition(name="longest_prefix_match", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_normalize", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_to_format", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_to_int", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_type", trusted=True, source="netutils"),
+ FilterDefinition(name="name_to_bits", trusted=True, source="netutils"),
+ FilterDefinition(name="name_to_bytes", trusted=True, source="netutils"),
+ FilterDefinition(name="name_to_name", trusted=True, source="netutils"),
+ FilterDefinition(name="netmask_to_cidr", trusted=True, source="netutils"),
+ FilterDefinition(name="netmask_to_wildcardmask", trusted=True, source="netutils"),
+ FilterDefinition(name="normalise_delimiter_caret_c", trusted=True, source="netutils"),
+ FilterDefinition(name="paloalto_panos_brace_to_set", trusted=True, source="netutils"),
+ FilterDefinition(name="paloalto_panos_clean_newlines", trusted=True, source="netutils"),
+ FilterDefinition(name="regex_findall", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_match", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_search", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_split", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_sub", trusted=False, source="netutils"),
+ FilterDefinition(name="sanitize_config", trusted=True, source="netutils"),
+ FilterDefinition(name="section_config", trusted=True, source="netutils"),
+ FilterDefinition(name="sort_interface_list", trusted=True, source="netutils"),
+ FilterDefinition(name="split_interface", trusted=True, source="netutils"),
+ FilterDefinition(name="uptime_seconds_to_string", trusted=True, source="netutils"),
+ FilterDefinition(name="uptime_string_to_seconds", trusted=True, source="netutils"),
+ FilterDefinition(name="version_metadata", trusted=True, source="netutils"),
+ FilterDefinition(name="vlanconfig_to_list", trusted=True, source="netutils"),
+ FilterDefinition(name="vlanlist_to_config", trusted=True, source="netutils"),
+ FilterDefinition(name="wildcardmask_to_netmask", trusted=True, source="netutils"),
+]
+
+
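+# Every filter definition known to the SDK: the builtin Jinja2 filters plus those from Netutils.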
+AVAILABLE_FILTERS = BUILTIN_FILTERS + NETUTILS_FILTERS
diff --git a/infrahub_sdk/template/models.py b/infrahub_sdk/template/models.py
new file mode 100644
index 00000000..e40393ab
--- /dev/null
+++ b/infrahub_sdk/template/models.py
@@ -0,0 +1,10 @@
+from dataclasses import dataclass
+
+from rich.syntax import Syntax
+from rich.traceback import Frame
+
+
+@dataclass
+class UndefinedJinja2Error:
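+ """Holds the traceback frame and syntax-highlighted snippet describing an undefined Jinja2 variable error."""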
+ frame: Frame
+ syntax: Syntax
diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py
index a45a65aa..a5d6f28d 100644
--- a/infrahub_sdk/utils.py
+++ b/infrahub_sdk/utils.py
@@ -1,7 +1,9 @@
from __future__ import annotations
+import base64
import hashlib
import json
+import uuid
from itertools import groupby
from pathlib import Path
from typing import TYPE_CHECKING, Any
@@ -25,6 +27,11 @@
from whenever import TimeDelta
+def generate_short_id() -> str:
+ """Generate a short unique ID"""
+ return base64.urlsafe_b64encode(uuid.uuid4().bytes).rstrip(b"=").decode("ascii").lower()
+
+
def base36encode(number: int) -> str:
if not isinstance(number, (int)):
raise TypeError("number must be an integer")
diff --git a/poetry.lock b/poetry.lock
index 281e570f..9938fd39 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "annotated-types"
@@ -6,6 +6,7 @@ version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -17,6 +18,7 @@ version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
{file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
@@ -30,7 +32,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
[package.extras]
doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""]
trio = ["trio (>=0.23)"]
[[package]]
@@ -39,6 +41,7 @@ version = "3.1.0"
description = "An abstract syntax tree for Python with inference support."
optional = false
python-versions = ">=3.8.0"
+groups = ["dev"]
files = [
{file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"},
{file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"},
@@ -53,6 +56,7 @@ version = "2.4.1"
description = "Annotate AST trees with source code positions"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
{file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
@@ -62,8 +66,8 @@ files = [
six = ">=1.12.0"
[package.extras]
-astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
-test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
+astroid = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\""]
+test = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\"", "pytest"]
[[package]]
name = "certifi"
@@ -71,6 +75,7 @@ version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
+groups = ["main", "dev"]
files = [
{file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
@@ -82,6 +87,7 @@ version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
@@ -93,6 +99,7 @@ version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
+groups = ["dev"]
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
@@ -192,10 +199,12 @@ version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
+markers = {main = "extra == \"ctl\" or extra == \"all\""}
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
@@ -206,10 +215,12 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main", "dev"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+markers = {main = "(extra == \"ctl\" or extra == \"all\") and platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
[[package]]
name = "coverage"
@@ -217,6 +228,7 @@ version = "7.6.1"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
{file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
@@ -296,7 +308,7 @@ files = [
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
-toml = ["tomli"]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
[[package]]
name = "decorator"
@@ -304,6 +316,7 @@ version = "5.1.1"
description = "Decorators for Humans"
optional = false
python-versions = ">=3.5"
+groups = ["dev"]
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
@@ -315,6 +328,7 @@ version = "0.3.8"
description = "Distribution utilities"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
@@ -326,6 +340,7 @@ version = "7.1.0"
description = "A Python library for the Docker Engine API."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"},
{file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"},
@@ -348,6 +363,7 @@ version = "0.21.7"
description = "Python Git Library"
optional = false
python-versions = ">=3.7"
+groups = ["main"]
files = [
{file = "dulwich-0.21.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d4c0110798099bb7d36a110090f2688050703065448895c4f53ade808d889dd3"},
{file = "dulwich-0.21.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bc12697f0918bee324c18836053644035362bb3983dc1b210318f2fed1d7132"},
@@ -435,6 +451,8 @@ version = "0.2.2"
description = "Like `typing._eval_type`, but lets older Python versions use newer typing features."
optional = false
python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version == \"3.9\""
files = [
{file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"},
{file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"},
@@ -449,6 +467,8 @@ version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
+markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@@ -463,6 +483,7 @@ version = "2.1.1"
description = "execnet: rapid multi-Python deployment"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
{file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
@@ -477,13 +498,14 @@ version = "2.1.0"
description = "Get the currently executing AST node of a frame, and other information"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"},
{file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"},
]
[package.extras]
-tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
+tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""]
[[package]]
name = "filelock"
@@ -491,6 +513,7 @@ version = "3.16.0"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"},
{file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"},
@@ -499,7 +522,7 @@ files = [
[package.extras]
docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"]
-typing = ["typing-extensions (>=4.12.2)"]
+typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
[[package]]
name = "graphql-core"
@@ -507,6 +530,7 @@ version = "3.2.4"
description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL."
optional = false
python-versions = "<4,>=3.6"
+groups = ["main"]
files = [
{file = "graphql-core-3.2.4.tar.gz", hash = "sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264"},
{file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"},
@@ -518,6 +542,7 @@ version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@@ -529,6 +554,7 @@ version = "1.0.5"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
{file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
@@ -550,6 +576,7 @@ version = "0.27.2"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
{file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
@@ -563,7 +590,7 @@ idna = "*"
sniffio = "*"
[package.extras]
-brotli = ["brotli", "brotlicffi"]
+brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
@@ -575,6 +602,7 @@ version = "2.6.0"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"},
{file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"},
@@ -589,6 +617,7 @@ version = "3.9"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
+groups = ["main", "dev"]
files = [
{file = "idna-3.9-py3-none-any.whl", hash = "sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"},
{file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"},
@@ -603,6 +632,8 @@ version = "8.5.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
+markers = "python_version == \"3.9\""
files = [
{file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
@@ -612,12 +643,12 @@ files = [
zipp = ">=3.20"
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
-test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
type = ["pytest-mypy"]
[[package]]
@@ -626,6 +657,8 @@ version = "6.4.5"
description = "Read resources from Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
+markers = "python_version == \"3.9\""
files = [
{file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"},
{file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"},
@@ -635,7 +668,7 @@ files = [
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
@@ -648,6 +681,7 @@ version = "1.1.0b2"
description = "Testcontainers instance for Infrahub to easily build integration tests"
optional = false
python-versions = "<4.0,>=3.9"
+groups = ["dev"]
files = [
{file = "infrahub_testcontainers-1.1.0b2-py3-none-any.whl", hash = "sha256:40a4f735b988db0f20eeedc68eab2fc40dcfba37382d9836a49bd6dbc282b80a"},
{file = "infrahub_testcontainers-1.1.0b2.tar.gz", hash = "sha256:fd3738a8f6588c16a8d88944b8f0c9faaa3a9f390cd2817bdabc8e08d4dae6a6"},
@@ -663,6 +697,7 @@ version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@@ -674,6 +709,7 @@ version = "2.2.0"
description = "Pythonic task execution"
optional = false
python-versions = ">=3.6"
+groups = ["dev"]
files = [
{file = "invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820"},
{file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"},
@@ -685,6 +721,7 @@ version = "8.18.1"
description = "IPython: Productive Interactive Computing"
optional = false
python-versions = ">=3.9"
+groups = ["dev"]
files = [
{file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"},
{file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"},
@@ -722,6 +759,7 @@ version = "0.19.1"
description = "An autocompletion tool for Python that can be used for text editors."
optional = false
python-versions = ">=3.6"
+groups = ["dev"]
files = [
{file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
{file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
@@ -741,10 +779,12 @@ version = "3.1.6"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
{file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[package.dependencies]
MarkupSafe = ">=2.0"
@@ -758,10 +798,12 @@ version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[package.dependencies]
mdurl = ">=0.1,<1.0"
@@ -782,6 +824,7 @@ version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
@@ -844,6 +887,7 @@ files = [
{file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[[package]]
name = "matplotlib-inline"
@@ -851,6 +895,7 @@ version = "0.1.7"
description = "Inline Matplotlib backend for Jupyter"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
{file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
@@ -865,10 +910,12 @@ version = "0.1.2"
description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[[package]]
name = "mypy"
@@ -876,6 +923,7 @@ version = "1.11.2"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"},
{file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"},
@@ -923,17 +971,33 @@ version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
+groups = ["dev"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
+[[package]]
+name = "netutils"
+version = "1.12.0"
+description = "Common helper functions useful in network automation."
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "netutils-1.12.0-py3-none-any.whl", hash = "sha256:7cb37796ce86637814f8c899f64db2b054986b0eda719d3fcadc293d451a4db1"},
+ {file = "netutils-1.12.0.tar.gz", hash = "sha256:96a790d11921063a6a64ee79c6e8c5a5ffcd05cbee07dd2b614d98c4416cffdd"},
+]
+
+[package.extras]
+optionals = ["jsonschema (>=4.17.3,<5.0.0)", "legacycrypt (==0.3)", "napalm (>=4.0.0,<5.0.0)"]
+
[[package]]
name = "nodeenv"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["dev"]
files = [
{file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
@@ -945,6 +1009,8 @@ version = "1.26.4"
description = "Fundamental package for array computing in Python"
optional = true
python-versions = ">=3.9"
+groups = ["main"]
+markers = "extra == \"ctl\" or extra == \"all\""
files = [
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
@@ -990,6 +1056,7 @@ version = "24.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
@@ -1001,6 +1068,7 @@ version = "0.8.4"
description = "A Python Parser"
optional = false
python-versions = ">=3.6"
+groups = ["dev"]
files = [
{file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
{file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
@@ -1016,6 +1084,7 @@ version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
@@ -1027,6 +1096,8 @@ version = "4.9.0"
description = "Pexpect allows easy control of interactive console applications."
optional = false
python-versions = "*"
+groups = ["dev"]
+markers = "sys_platform != \"win32\""
files = [
{file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
{file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
@@ -1041,6 +1112,7 @@ version = "4.3.3"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"},
{file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"},
@@ -1057,6 +1129,7 @@ version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@@ -1072,6 +1145,7 @@ version = "0.4.0"
description = "A drop-in replacement for pprint that's actually pretty"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "pprintpp-0.4.0-py2.py3-none-any.whl", hash = "sha256:b6b4dcdd0c0c0d75e4d7b2f21a9e933e5b2ce62b26e1a54537f9651ae5a5c01d"},
{file = "pprintpp-0.4.0.tar.gz", hash = "sha256:ea826108e2c7f49dc6d66c752973c3fc9749142a798d6b254e1e301cfdbc6403"},
@@ -1083,6 +1157,7 @@ version = "2.21.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"},
{file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"},
@@ -1101,6 +1176,7 @@ version = "3.0.47"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
+groups = ["dev"]
files = [
{file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
{file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"},
@@ -1115,6 +1191,8 @@ version = "0.7.0"
description = "Run a subprocess in a pseudo terminal"
optional = false
python-versions = "*"
+groups = ["dev"]
+markers = "sys_platform != \"win32\""
files = [
{file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
@@ -1126,6 +1204,7 @@ version = "0.2.3"
description = "Safely evaluate AST nodes without side effects"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"},
{file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"},
@@ -1140,6 +1219,8 @@ version = "18.1.0"
description = "Python library for Apache Arrow"
optional = true
python-versions = ">=3.9"
+groups = ["main"]
+markers = "extra == \"ctl\" or extra == \"all\""
files = [
{file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"},
{file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"},
@@ -1194,6 +1275,7 @@ version = "2.9.1"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"},
{file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"},
@@ -1209,7 +1291,7 @@ typing-extensions = [
[package.extras]
email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata"]
+timezone = ["tzdata ; python_version >= \"3.9\" and sys_platform == \"win32\""]
[[package]]
name = "pydantic-core"
@@ -1217,6 +1299,7 @@ version = "2.23.3"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"},
{file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"},
@@ -1318,6 +1401,7 @@ version = "2.5.2"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"},
{file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"},
@@ -1338,10 +1422,12 @@ version = "2.18.0"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
{file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[package.extras]
windows-terminal = ["colorama (>=0.4.6)"]
@@ -1352,6 +1438,7 @@ version = "8.3.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
@@ -1374,6 +1461,7 @@ version = "0.21.2"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"},
{file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"},
@@ -1392,6 +1480,7 @@ version = "1.0.1"
description = "A plugin providing an alternative, colourful diff output for failing assertions."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+groups = ["dev"]
files = [
{file = "pytest-clarity-1.0.1.tar.gz", hash = "sha256:505fe345fad4fe11c6a4187fe683f2c7c52c077caa1e135f3e483fe112db7772"},
]
@@ -1407,6 +1496,7 @@ version = "4.1.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
{file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
@@ -1425,6 +1515,7 @@ version = "0.30.0"
description = "Send responses to httpx."
optional = false
python-versions = ">=3.9"
+groups = ["dev"]
files = [
{file = "pytest-httpx-0.30.0.tar.gz", hash = "sha256:755b8edca87c974dd4f3605c374fda11db84631de3d163b99c0df5807023a19a"},
{file = "pytest_httpx-0.30.0-py3-none-any.whl", hash = "sha256:6d47849691faf11d2532565d0c8e0e02b9f4ee730da31687feae315581d7520c"},
@@ -1443,6 +1534,7 @@ version = "3.6.1"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
{file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
@@ -1463,6 +1555,7 @@ version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@@ -1477,6 +1570,8 @@ version = "308"
description = "Python for Window Extensions"
optional = false
python-versions = "*"
+groups = ["dev"]
+markers = "sys_platform == \"win32\""
files = [
{file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"},
{file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"},
@@ -1504,6 +1599,7 @@ version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
@@ -1559,6 +1655,7 @@ files = [
{file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[[package]]
name = "requests"
@@ -1566,6 +1663,7 @@ version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@@ -1587,10 +1685,12 @@ version = "13.8.1"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.7.0"
+groups = ["main", "dev"]
files = [
{file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"},
{file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"},
]
+markers = {main = "extra == \"ctl\" or extra == \"tests\" or extra == \"all\""}
[package.dependencies]
markdown-it-py = ">=2.2.0"
@@ -1605,6 +1705,7 @@ version = "0.11.0"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "ruff-0.11.0-py3-none-linux_armv6l.whl", hash = "sha256:dc67e32bc3b29557513eb7eeabb23efdb25753684b913bebb8a0c62495095acb"},
{file = "ruff-0.11.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38c23fd9bdec4eb437b4c1e3595905a0a8edfccd63a790f818b28c78fe345639"},
@@ -1632,6 +1733,8 @@ version = "1.5.4"
description = "Tool to Detect Surrounding Shell"
optional = true
python-versions = ">=3.7"
+groups = ["main"]
+markers = "extra == \"ctl\" or extra == \"all\""
files = [
{file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
@@ -1643,6 +1746,7 @@ version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+groups = ["dev"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -1654,6 +1758,7 @@ version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@@ -1665,6 +1770,7 @@ version = "0.6.3"
description = "Extract data from python stack frames and tracebacks for informative displays"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
{file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
@@ -1684,6 +1790,7 @@ version = "4.8.2"
description = "Python library for throwaway instances of anything that can run in a Docker container"
optional = false
python-versions = "<4.0,>=3.9"
+groups = ["dev"]
files = [
{file = "testcontainers-4.8.2-py3-none-any.whl", hash = "sha256:9e19af077cd96e1957c13ee466f1f32905bc6c5bc1bc98643eb18be1a989bfb0"},
{file = "testcontainers-4.8.2.tar.gz", hash = "sha256:dd4a6a2ea09e3c3ecd39e180b6548105929d0bb78d665ce9919cb3f8c98f9853"},
@@ -1736,6 +1843,8 @@ version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
optional = true
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+groups = ["main"]
+markers = "extra == \"ctl\" or extra == \"all\""
files = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
@@ -1747,6 +1856,8 @@ version = "2.0.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
+groups = ["main", "dev"]
+markers = "python_version < \"3.11\""
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
@@ -1758,6 +1869,7 @@ version = "24.8.0"
description = "Building newsfiles for your project."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "towncrier-24.8.0-py3-none-any.whl", hash = "sha256:9343209592b839209cdf28c339ba45792fbfe9775b5f9c177462fd693e127d8d"},
{file = "towncrier-24.8.0.tar.gz", hash = "sha256:013423ee7eed102b2f393c287d22d95f66f1a3ea10a4baa82d298001a7f18af3"},
@@ -1779,6 +1891,7 @@ version = "5.14.3"
description = "Traitlets Python configuration system"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"},
{file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"},
@@ -1794,6 +1907,8 @@ version = "0.12.5"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = true
python-versions = ">=3.7"
+groups = ["main"]
+markers = "extra == \"ctl\" or extra == \"all\""
files = [
{file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"},
{file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"},
@@ -1811,6 +1926,7 @@ version = "8.0.2.20240310"
description = "Typing stubs for python-slugify"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "types-python-slugify-8.0.2.20240310.tar.gz", hash = "sha256:5157b508c7fed587520c70d77f62aea0fafdc6620893c2ec8972f13a1faf5560"},
{file = "types_python_slugify-8.0.2.20240310-py3-none-any.whl", hash = "sha256:0efec18b802c69ebd22dcee55c91afaeaa80e1e40ddd66ccabf69fd42ce87b74"},
@@ -1822,6 +1938,7 @@ version = "6.0.12.20240808"
description = "Typing stubs for PyYAML"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"},
{file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"},
@@ -1833,6 +1950,7 @@ version = "0.10.8.20240310"
description = "Typing stubs for toml"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"},
{file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"},
@@ -1844,6 +1962,7 @@ version = "5.10.0.20240515"
description = "Typing stubs for ujson"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "types-ujson-5.10.0.20240515.tar.gz", hash = "sha256:ceae7127f0dafe4af5dd0ecf98ee13e9d75951ef963b5c5a9b7ea92e0d71f0d7"},
{file = "types_ujson-5.10.0.20240515-py3-none-any.whl", hash = "sha256:02bafc36b3a93d2511757a64ff88bd505e0a57fba08183a9150fbcfcb2015310"},
@@ -1855,6 +1974,7 @@ version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -1866,6 +1986,8 @@ version = "2024.1"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
+groups = ["main"]
+markers = "sys_platform == \"win32\""
files = [
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
@@ -1877,6 +1999,7 @@ version = "5.10.0"
description = "Ultra fast JSON encoder and decoder for Python"
optional = false
python-versions = ">=3.8"
+groups = ["main"]
files = [
{file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"},
{file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"},
@@ -1964,13 +2087,14 @@ version = "2.2.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
+groups = ["main", "dev"]
files = [
{file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
{file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@@ -1981,6 +2105,7 @@ version = "20.26.6"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
+groups = ["dev"]
files = [
{file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"},
{file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"},
@@ -1993,7 +2118,7 @@ platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
-test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""]
[[package]]
name = "wcwidth"
@@ -2001,6 +2126,7 @@ version = "0.2.13"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
+groups = ["dev"]
files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
@@ -2012,6 +2138,7 @@ version = "0.7.2"
description = "Modern datetime library for Python"
optional = false
python-versions = ">=3.9"
+groups = ["main"]
files = [
{file = "whenever-0.7.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a87864d3e7679dbedc55d3aa8c6cef5ffdc45520e16805f4c5a3cf71241fb986"},
{file = "whenever-0.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f37dc37d1bea611af16a3aaba5960038604ddfb4a592b1d72a3efccd5853b6da"},
@@ -2095,6 +2222,7 @@ version = "1.17.0"
description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"},
{file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"},
@@ -2169,6 +2297,7 @@ version = "1.35.1"
description = "A linter for YAML files."
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
files = [
{file = "yamllint-1.35.1-py3-none-any.whl", hash = "sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3"},
{file = "yamllint-1.35.1.tar.gz", hash = "sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"},
@@ -2187,17 +2316,19 @@ version = "3.20.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
+groups = ["dev"]
+markers = "python_version == \"3.9\""
files = [
{file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
{file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
]
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]
[extras]
@@ -2206,6 +2337,6 @@ ctl = ["Jinja2", "numpy", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer"]
tests = ["Jinja2", "pytest", "pyyaml", "rich"]
[metadata]
-lock-version = "2.0"
+lock-version = "2.1"
python-versions = "^3.9, <3.14"
-content-hash = "b3e5f33a5e7089dfb49e9d4fd41b71feba6a5f2ec50c67f18202caa973baf1b3"
+content-hash = "b2747ad942541d2b546562e33cc9cb6d84b26f3d5ca10d72e8f24f55e2a9492e"
diff --git a/pyproject.toml b/pyproject.toml
index 704d6bec..bc262f4f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "infrahub-sdk"
-version = "1.9.2"
+version = "1.10.0"
description = "Python Client to interact with Infrahub"
authors = ["OpsMill "]
readme = "README.md"
@@ -43,6 +43,7 @@ pyyaml = { version = "^6", optional = true }
eval-type-backport = { version = "^0.2.2", python = "~3.9" }
dulwich = "^0.21.4"
whenever = "0.7.2"
+netutils = "^1.0.0"
[tool.poetry.group.dev.dependencies]
pytest = "*"
diff --git a/tasks.py b/tasks.py
index cea9a59a..b5e00b17 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,3 +1,4 @@
+import asyncio
import sys
from pathlib import Path
from typing import Any
@@ -14,6 +15,7 @@ def _generate(context: Context) -> None:
"""Generate documentation output from code."""
_generate_infrahubctl_documentation(context=context)
_generate_infrahub_sdk_configuration_documentation()
+ _generate_infrahub_sdk_template_documentation()
def _generate_infrahubctl_documentation(context: Context) -> None:
@@ -89,6 +91,24 @@ def _generate_infrahub_sdk_configuration_documentation() -> None:
print(f"Docs saved to: {output_file}")
+def _generate_infrahub_sdk_template_documentation() -> None:
+ """Generate documentation for the Infrahub SDK template reference."""
+ from infrahub_sdk.template import Jinja2Template
+ from infrahub_sdk.template.filters import BUILTIN_FILTERS, NETUTILS_FILTERS
+
+ output_file = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "templating.mdx"
+ jinja2_template = Jinja2Template(
+ template=Path("sdk_template_reference.j2"),
+ template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ )
+
+ rendered_file = asyncio.run(
+ jinja2_template.render(variables={"builtin": BUILTIN_FILTERS, "netutils": NETUTILS_FILTERS})
+ )
+ output_file.write_text(rendered_file, encoding="utf-8")
+ print(f"Docs saved to: {output_file}")
+
+
def _get_env_vars() -> dict[str, list[str]]:
"""Retrieve environment variables for Infrahub SDK configuration."""
from collections import defaultdict
@@ -170,7 +190,7 @@ def docs_build(context: Context) -> None:
with context.cd(DOCUMENTATION_DIRECTORY):
output = context.run(exec_cmd)
- if output.exited != 0:
+ if output and output.exited != 0:
sys.exit(-1)
@@ -184,3 +204,4 @@ def generate_infrahubctl(context: Context) -> None:
def generate_python_sdk(context: Context) -> None: # noqa: ARG001
"""Generate documentation for the Python SDK."""
_generate_infrahub_sdk_configuration_documentation()
+ _generate_infrahub_sdk_template_documentation()
diff --git a/tests/fixtures/repos/missing_template_file/.infrahub.yml b/tests/fixtures/repos/missing_template_file/.infrahub.yml
new file mode 100644
index 00000000..207f5f2e
--- /dev/null
+++ b/tests/fixtures/repos/missing_template_file/.infrahub.yml
@@ -0,0 +1,18 @@
+---
+jinja2_transforms:
+ - name: tag_format_missing
+ query: "tags_query"
+ template_path: "tag_format.file-is-missing"
+ - name: undefined_variables
+ query: "tags_query"
+ template_path: "templates/undefined.j2"
+ - name: syntax_error
+ query: "tags_query"
+ template_path: "templates/syntax-error.html"
+ - name: missing_filter
+ query: "tags_query"
+ template_path: "templates/wrong-filter.j2"
+
+queries:
+ - name: tags_query
+ file_path: tags_query.gql
diff --git a/tests/fixtures/repos/missing_template_file/tags_query.gql b/tests/fixtures/repos/missing_template_file/tags_query.gql
new file mode 100644
index 00000000..6d2ea6ab
--- /dev/null
+++ b/tests/fixtures/repos/missing_template_file/tags_query.gql
@@ -0,0 +1,11 @@
+query TagsQuery($name: String!) {
+ BuiltinTag(name__value: $name) {
+ edges {
+ node {
+ name {
+ value
+ }
+ }
+ }
+ }
+}
diff --git a/tests/fixtures/repos/missing_template_file/templates/syntax-error.html b/tests/fixtures/repos/missing_template_file/templates/syntax-error.html
new file mode 100644
index 00000000..31730cc6
--- /dev/null
+++ b/tests/fixtures/repos/missing_template_file/templates/syntax-error.html
@@ -0,0 +1,5 @@
+
+
+{{ title }
+
+
\ No newline at end of file
diff --git a/tests/fixtures/repos/missing_template_file/templates/undefined.j2 b/tests/fixtures/repos/missing_template_file/templates/undefined.j2
new file mode 100644
index 00000000..fb8e02e8
--- /dev/null
+++ b/tests/fixtures/repos/missing_template_file/templates/undefined.j2
@@ -0,0 +1 @@
+hostname {{ host.name }}
\ No newline at end of file
diff --git a/tests/fixtures/repos/missing_template_file/templates/wrong-filter.j2 b/tests/fixtures/repos/missing_template_file/templates/wrong-filter.j2
new file mode 100644
index 00000000..d2a8f283
--- /dev/null
+++ b/tests/fixtures/repos/missing_template_file/templates/wrong-filter.j2
@@ -0,0 +1 @@
+{{ data|my_filter_is_missing }}
\ No newline at end of file
diff --git a/tests/fixtures/unit/test_infrahubctl/red_tags_query/red_tag.json b/tests/fixtures/unit/test_infrahubctl/red_tags_query/red_tag.json
new file mode 100644
index 00000000..51438236
--- /dev/null
+++ b/tests/fixtures/unit/test_infrahubctl/red_tags_query/red_tag.json
@@ -0,0 +1,15 @@
+{
+ "data": {
+ "BuiltinTag": {
+ "edges": [
+ {
+ "node": {
+ "name": {
+ "value": "red"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
\ No newline at end of file
diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py
index 1cfd6a5c..9de4ee71 100644
--- a/tests/helpers/utils.py
+++ b/tests/helpers/utils.py
@@ -2,8 +2,13 @@
import os
import re
+import shutil
+import tempfile
from collections.abc import Generator
from contextlib import contextmanager
+from pathlib import Path
+
+from infrahub_sdk.repository import GitRepoManager
@contextmanager
@@ -22,6 +27,21 @@ def change_directory(new_directory: str) -> Generator[None, None, None]:
os.chdir(original_directory)
+@contextmanager
+def temp_repo_and_cd(source_dir: Path) -> Generator[Path, None, None]:
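+    """Copy source_dir into a temporary directory, set it up with GitRepoManager, and chdir into it for the duration of the test."""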
+ temp_dir = tempfile.mkdtemp()
+ original_directory = os.getcwd()
+
+ try:
+ shutil.copytree(source_dir, temp_dir, dirs_exist_ok=True)
+        GitRepoManager(temp_dir)  # initialize a git repository in the temporary copy of the fixtures
+ os.chdir(temp_dir)
+ yield Path(temp_dir)
+ finally:
+ os.chdir(original_directory)
+ shutil.rmtree(temp_dir)
+
+
def strip_color(text: str) -> str:
ansi_escape = re.compile(r"\x1B[@-_][0-?]*[ -/]*[@-~]")
return ansi_escape.sub("", text)
diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py
new file mode 100644
index 00000000..dceba985
--- /dev/null
+++ b/tests/unit/ctl/test_render_app.py
@@ -0,0 +1,75 @@
+import json
+import os
+import sys
+from dataclasses import dataclass
+from pathlib import Path
+
+import pytest
+from pytest_httpx._httpx_mock import HTTPXMock
+from typer.testing import CliRunner
+
+from infrahub_sdk.ctl.cli_commands import app
+from tests.helpers.fixtures import read_fixture
+from tests.helpers.utils import strip_color, temp_repo_and_cd
+
+runner = CliRunner()
+
+
+FIXTURE_BASE_DIR = Path(Path(os.path.abspath(__file__)).parent / ".." / ".." / "fixtures" / "repos")
+
+requires_python_310 = pytest.mark.skipif(sys.version_info < (3, 10), reason="Requires Python 3.10 or higher")
+
+
+@dataclass
+class RenderAppFailure:
+ name: str
+ template: str
+ error: str
+
+
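+# Each case maps a jinja2 transform from the missing_template_file fixture repo to the error that "infrahubctl render" should report.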
+RENDER_APP_FAIL_TEST_CASES = [
+ RenderAppFailure(
+ name="main-template-not-found",
+ template="tag_format_missing",
+ error="Missing template: tag_format.file-is-missing",
+ ),
+ RenderAppFailure(
+ name="has-undefined-variables",
+ template="undefined_variables",
+ error="'host' is undefined",
+ ),
+ RenderAppFailure(
+ name="has-syntax-error",
+ template="syntax_error",
+ error="unexpected '}'",
+ ),
+ RenderAppFailure(
+ name="invalid-filter",
+ template="missing_filter",
+ error="No filter named 'my_filter_is_missing'.",
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "test_case",
+ [pytest.param(tc, id=tc.name) for tc in RENDER_APP_FAIL_TEST_CASES],
+)
+@requires_python_310
+def test_validate_template_not_found(test_case: RenderAppFailure, httpx_mock: HTTPXMock) -> None:
+ """Ensure that the correct errors are caught"""
+ httpx_mock.add_response(
+ method="POST",
+ url="http://mock/graphql/main",
+ json=json.loads(
+ read_fixture(
+ "red_tag.json",
+ "unit/test_infrahubctl/red_tags_query",
+ )
+ ),
+ )
+
+ with temp_repo_and_cd(source_dir=FIXTURE_BASE_DIR / "missing_template_file"):
+ output = runner.invoke(app, ["render", test_case.template, "name=red"])
+ assert test_case.error in strip_color(output.stdout)
+ assert output.exit_code == 1
diff --git a/tests/unit/sdk/test_client.py b/tests/unit/sdk/test_client.py
index a585ee87..f7bad76f 100644
--- a/tests/unit/sdk/test_client.py
+++ b/tests/unit/sdk/test_client.py
@@ -135,16 +135,16 @@ async def test_method_get_user_permissions(clients, mock_query_infrahub_user, cl
async def test_method_all_with_limit(clients, mock_query_repository_page1_2, client_type):
if client_type == "standard":
repos = await clients.standard.all(kind="CoreRepository", populate_store=False, limit=3)
- assert not clients.standard.store._store["CoreRepository"]
+ assert clients.standard.store.count() == 0
repos = await clients.standard.all(kind="CoreRepository", limit=3)
- assert len(clients.standard.store._store["CoreRepository"]) == 3
+ assert clients.standard.store.count() == 3
else:
repos = clients.sync.all(kind="CoreRepository", populate_store=False, limit=3)
- assert not clients.sync.store._store["CoreRepository"]
+ assert clients.sync.store.count() == 0
repos = clients.sync.all(kind="CoreRepository", limit=3)
- assert len(clients.sync.store._store["CoreRepository"]) == 3
+ assert clients.sync.store.count() == 3
assert len(repos) == 3
@@ -154,16 +154,16 @@ async def test_method_all_multiple_pages(
):
if client_type == "standard":
repos = await clients.standard.all(kind="CoreRepository", populate_store=False)
- assert not clients.standard.store._store["CoreRepository"]
+ assert clients.standard.store.count() == 0
repos = await clients.standard.all(kind="CoreRepository")
- assert len(clients.standard.store._store["CoreRepository"]) == 5
+ assert clients.standard.store.count() == 5
else:
repos = clients.sync.all(kind="CoreRepository", populate_store=False)
- assert not clients.sync.store._store["CoreRepository"]
+ assert clients.sync.store.count() == 0
repos = clients.sync.all(kind="CoreRepository")
- assert len(clients.sync.store._store["CoreRepository"]) == 5
+ assert clients.sync.store.count() == 5
assert len(repos) == 5
@@ -174,16 +174,16 @@ async def test_method_all_batching(
):
if client_type == "standard":
locations = await clients.standard.all(kind="BuiltinLocation", populate_store=False, parallel=use_parallel)
- assert not clients.standard.store._store["BuiltinLocation"]
+ assert clients.standard.store.count() == 0
locations = await clients.standard.all(kind="BuiltinLocation", parallel=use_parallel)
- assert len(clients.standard.store._store["BuiltinLocation"]) == 30
+ assert clients.standard.store.count() == 30
else:
locations = clients.sync.all(kind="BuiltinLocation", populate_store=False, parallel=use_parallel)
- assert not clients.sync.store._store["BuiltinLocation"]
+ assert clients.sync.store.count() == 0
locations = clients.sync.all(kind="BuiltinLocation", parallel=use_parallel)
- assert len(clients.sync.store._store["BuiltinLocation"]) == 30
+ assert clients.sync.store.count() == 30
assert len(locations) == 30
@@ -192,16 +192,16 @@ async def test_method_all_batching(
async def test_method_all_single_page(clients, mock_query_repository_page1_1, client_type):
if client_type == "standard":
repos = await clients.standard.all(kind="CoreRepository", populate_store=False)
- assert not clients.standard.store._store["CoreRepository"]
+ assert clients.standard.store.count() == 0
repos = await clients.standard.all(kind="CoreRepository")
- assert len(clients.standard.store._store["CoreRepository"]) == 2
+ assert clients.standard.store.count() == 2
else:
repos = clients.sync.all(kind="CoreRepository", populate_store=False)
- assert not clients.sync.store._store["CoreRepository"]
+ assert clients.sync.store.count() == 0
repos = clients.sync.all(kind="CoreRepository")
- assert len(clients.sync.store._store["CoreRepository"]) == 2
+ assert clients.sync.store.count() == 2
assert len(repos) == 2
@@ -443,7 +443,7 @@ async def test_method_filters_many(httpx_mock: HTTPXMock, clients, mock_query_re
populate_store=False,
)
assert len(repos) == 2
- assert not clients.standard.store._store["CoreRepository"]
+ assert clients.standard.store.count() == 0
repos = await clients.standard.filters(
kind="CoreRepository",
@@ -452,7 +452,7 @@ async def test_method_filters_many(httpx_mock: HTTPXMock, clients, mock_query_re
"9486cfce-87db-479d-ad73-07d80ba96a0f",
],
)
- assert len(clients.standard.store._store["CoreRepository"]) == 2
+ assert clients.standard.store.count() == 2
assert len(repos) == 2
else:
repos = clients.sync.filters(
@@ -464,7 +464,7 @@ async def test_method_filters_many(httpx_mock: HTTPXMock, clients, mock_query_re
populate_store=False,
)
assert len(repos) == 2
- assert not clients.sync.store._store["CoreRepository"]
+ assert clients.sync.store.count() == 0
repos = clients.sync.filters(
kind="CoreRepository",
@@ -473,7 +473,7 @@ async def test_method_filters_many(httpx_mock: HTTPXMock, clients, mock_query_re
"9486cfce-87db-479d-ad73-07d80ba96a0f",
],
)
- assert len(clients.sync.store._store["CoreRepository"]) == 2
+ assert clients.sync.store.count() == 2
assert len(repos) == 2
diff --git a/tests/unit/sdk/test_data/templates/broken_on_line6.j2 b/tests/unit/sdk/test_data/templates/broken_on_line6.j2
new file mode 100644
index 00000000..f7ce8b24
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/broken_on_line6.j2
@@ -0,0 +1,8 @@
+# Included file
+
+## Subsection
+
+* {{ name }}
+* {{ name }
+
+# The end
diff --git a/tests/unit/sdk/test_data/templates/hello-world.j2 b/tests/unit/sdk/test_data/templates/hello-world.j2
new file mode 100644
index 00000000..d7732488
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/hello-world.j2
@@ -0,0 +1 @@
+Hello {{ name }}
\ No newline at end of file
diff --git a/tests/unit/sdk/test_data/templates/imports-missing-file.html b/tests/unit/sdk/test_data/templates/imports-missing-file.html
new file mode 100644
index 00000000..b7006ae1
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/imports-missing-file.html
@@ -0,0 +1,8 @@
+
+
+ Some Title
+
+
+{% include 'i-do-not-exist.html' %}
+
+
\ No newline at end of file
diff --git a/tests/unit/sdk/test_data/templates/index.html b/tests/unit/sdk/test_data/templates/index.html
new file mode 100644
index 00000000..46bf6501
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/index.html
@@ -0,0 +1,8 @@
+
+
+ Some Title
+
+
+{{ highlight }
+
+
\ No newline at end of file
diff --git a/tests/unit/sdk/test_data/templates/ip_report.j2 b/tests/unit/sdk/test_data/templates/ip_report.j2
new file mode 100644
index 00000000..98f26ef3
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/ip_report.j2
@@ -0,0 +1 @@
+IP Address: {{ address | ipaddress_interface("ip") }}/{{ address | ipaddress_interface("network") | ipaddress_network("prefixlen") }}
\ No newline at end of file
diff --git a/tests/unit/sdk/test_data/templates/main.j2 b/tests/unit/sdk/test_data/templates/main.j2
new file mode 100644
index 00000000..7268ba49
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/main.j2
@@ -0,0 +1,6 @@
+Some text that works just fine
+
+{% include 'broken_on_line6.j2' %}
+
+Hello {{ name }}
+
diff --git a/tests/unit/sdk/test_data/templates/report.html b/tests/unit/sdk/test_data/templates/report.html
new file mode 100644
index 00000000..c02ee5fd
--- /dev/null
+++ b/tests/unit/sdk/test_data/templates/report.html
@@ -0,0 +1,11 @@
+
+
+
+{% for server in servers %}
+ - {{server.name}}: {{ server.ip.primary }}
+{% endfor %}
+
+
+
+
+
\ No newline at end of file
diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py
index cc7834d3..70966f6a 100644
--- a/tests/unit/sdk/test_node.py
+++ b/tests/unit/sdk/test_node.py
@@ -13,6 +13,7 @@
InfrahubNodeSync,
RelatedNodeBase,
RelationshipManagerBase,
+ parse_human_friendly_id,
)
from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI
@@ -100,6 +101,13 @@ async def test_validate_method_signature(
)
+@pytest.mark.parametrize("hfid,expected_kind,expected_hfid", [("BuiltinLocation__JFK1", "BuiltinLocation", ["JFK1"])])
+def test_parse_human_friendly_id(hfid: str, expected_kind: str, expected_hfid: list[str]):
+ kind, hfid = parse_human_friendly_id(hfid)
+ assert kind == expected_kind
+ assert hfid == expected_hfid
+
+
@pytest.mark.parametrize("client_type", client_types)
async def test_init_node_no_data(client, location_schema, client_type):
if client_type == "standard":
@@ -1759,8 +1767,8 @@ async def test_node_get_relationship_from_store(
tag_red = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_red_data)
tag_blue = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_blue_data)
- client.store.set(key=tag_red.id, node=tag_red)
- client.store.set(key=tag_blue.id, node=tag_blue)
+ client.store.set(node=tag_red)
+ client.store.set(node=tag_blue)
assert node.primary_tag.peer == tag_red
assert node.primary_tag.get() == tag_red
@@ -2204,3 +2212,17 @@ async def test_get_pool_resources_utilization(
utilizations = ip_pool.get_pool_resources_utilization()
assert len(utilizations) == 1
assert utilizations[0]["utilization"] == 93.75
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_from_graphql(clients, mock_schema_query_01, location_data01, client_type):
+ if client_type == "standard":
+ schema = await clients.standard.schema.get(kind="BuiltinLocation", branch="main")
+ node = await InfrahubNode.from_graphql(
+ client=clients.standard, schema=schema, branch="main", data=location_data01
+ )
+ else:
+ schema = clients.sync.schema.get(kind="BuiltinLocation", branch="main")
+ node = InfrahubNodeSync.from_graphql(client=clients.sync, schema=schema, branch="main", data=location_data01)
+
+ assert node.id == "llllllll-llll-llll-llll-llllllllllll"
diff --git a/tests/unit/sdk/test_store.py b/tests/unit/sdk/test_store.py
index 3d02bb32..91b0ec21 100644
--- a/tests/unit/sdk/test_store.py
+++ b/tests/unit/sdk/test_store.py
@@ -1,44 +1,148 @@
import pytest
-from infrahub_sdk.node import InfrahubNode
-from infrahub_sdk.store import NodeStore
+from infrahub_sdk.exceptions import NodeInvalidError, NodeNotFoundError
+from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync
+from infrahub_sdk.store import NodeStore, NodeStoreSync
client_types = ["standard", "sync"]
@pytest.mark.parametrize("client_type", client_types)
-def test_node_store_set(client_type, clients, location_schema):
- client = getattr(clients, client_type)
+def test_node_store_set(client_type, clients, schema_with_hfid):
+ if client_type == "standard":
+ client = clients.standard
+ store = NodeStore(default_branch="main")
+ node_class = InfrahubNode
+ else:
+ client = clients.sync
+ store = NodeStoreSync(default_branch="main")
+ node_class = InfrahubNodeSync
+
data = {
"name": {"value": "JFK1"},
"description": {"value": "JFK Airport"},
"type": {"value": "SITE"},
}
- node = InfrahubNode(client=client, schema=location_schema, data=data)
+ node = node_class(client=client, schema=schema_with_hfid["location"], data=data)
+
+ store.set(key="mykey", node=node)
+
+ assert node._internal_id in store._branches[client.default_branch]._objs
+ assert "mykey" in store._branches[client.default_branch]._keys
+ assert store._branches[client.default_branch]._keys["mykey"] == node._internal_id
+
- store = NodeStore()
+@pytest.mark.parametrize("client_type", client_types)
+def test_node_store_set_no_hfid(client_type, clients, location_schema):
+ if client_type == "standard":
+ client = clients.standard
+ store = NodeStore(default_branch="main")
+ node_class = InfrahubNode
+ else:
+ client = clients.sync
+ store = NodeStoreSync(default_branch="main")
+ node_class = InfrahubNodeSync
+
+ data = {
+ "name": {"value": "JFK1"},
+ "description": {"value": "JFK Airport"},
+ "type": {"value": "SITE"},
+ }
+ node = node_class(client=client, schema=location_schema, data=data)
store.set(key="mykey", node=node)
- assert store._store["BuiltinLocation"]["mykey"]
+ assert store._branches[client.default_branch]._objs[node._internal_id] == node
+ assert node._internal_id in store._branches[client.default_branch]._objs
+ assert "mykey" in store._branches[client.default_branch]._keys
+ assert store._branches[client.default_branch]._keys["mykey"] == node._internal_id
+
+ branch_name = "mybranch"
+ store.set(key="anotherkey", node=node, branch=branch_name)
+
+ assert store._branches[branch_name]._objs[node._internal_id] == node
+ assert node._internal_id in store._branches[branch_name]._objs
+ assert "anotherkey" in store._branches[branch_name]._keys
+ assert store._branches[branch_name]._keys["anotherkey"] == node._internal_id
@pytest.mark.parametrize("client_type", client_types)
def test_node_store_get(client_type, clients, location_schema):
- client = getattr(clients, client_type)
+ if client_type == "standard":
+ client = clients.standard
+ store = NodeStore(default_branch="main")
+ node_class = InfrahubNode
+ else:
+ client = clients.sync
+ store = NodeStoreSync(default_branch="main")
+ node_class = InfrahubNodeSync
+
data = {
"id": "54f3108c-1f21-44c4-93cf-ec5737587b48",
"name": {"value": "JFK1"},
"description": {"value": "JFK Airport"},
"type": {"value": "SITE"},
}
- node = InfrahubNode(client=client, schema=location_schema, data=data)
+ node = node_class(client=client, schema=location_schema, data=data)
+
+ store.set(key="mykey", node=node)
+
+ assert store.get(key=node._internal_id).id == node.id
+ assert store.get(kind="BuiltinLocation", key="mykey").id == node.id
+ assert store.get(key="mykey").id == node.id
- store = NodeStore()
+ assert store.get(kind="BuiltinTest", key="mykey", raise_when_missing=False) is None
+
+ assert store.get(kind="BuiltinLocation", key="anotherkey", raise_when_missing=False) is None
+ assert store.get(key="anotherkey", raise_when_missing=False) is None
+
+ with pytest.raises(NodeNotFoundError):
+ store.get(kind="BuiltinLocation", key="anotherkey")
+ with pytest.raises(NodeNotFoundError):
+ store.get(key="anotherkey")
+ with pytest.raises(NodeNotFoundError):
+ store.get(key="mykey", branch="mybranch")
+
+ with pytest.raises(NodeInvalidError):
+ store.get(kind="BuiltinTest", key="mykey")
+
+ store.set(key="mykey", node=node, branch="mybranch")
+ assert store.get(key="mykey", branch="mybranch").id == node.id
+
+
+@pytest.mark.parametrize("client_type", client_types)
+def test_node_store_get_with_hfid(client_type, clients, schema_with_hfid):
+ if client_type == "standard":
+ client = clients.standard
+ store = NodeStore(default_branch="main")
+ node_class = InfrahubNode
+ else:
+ client = clients.sync
+ store = NodeStoreSync(default_branch="main")
+ node_class = InfrahubNodeSync
+
+ data = {
+ "id": "54f3108c-1f21-44c4-93cf-ec5737587b48",
+ "name": {"value": "JFK1"},
+ "description": {"value": "JFK Airport"},
+ "type": {"value": "SITE"},
+ }
+ node = node_class(client=client, schema=schema_with_hfid["location"], data=data)
store.set(key="mykey", node=node)
+
+ assert store.get(key=node._internal_id).id == node.id
+
assert store.get(kind="BuiltinLocation", key="mykey").id == node.id
assert store.get(key="mykey").id == node.id
+ assert store.get(key="BuiltinLocation__JFK1").id == node.id
+ assert store.get(kind="BuiltinLocation", key="JFK1").id == node.id
+ assert store.get(key="54f3108c-1f21-44c4-93cf-ec5737587b48").id == node.id
assert store.get(kind="BuiltinLocation", key="anotherkey", raise_when_missing=False) is None
assert store.get(key="anotherkey", raise_when_missing=False) is None
+
+ with pytest.raises(NodeNotFoundError):
+ store.get(kind="BuiltinLocation", key="anotherkey")
+ with pytest.raises(NodeNotFoundError):
+ store.get(key="anotherkey")
diff --git a/tests/unit/sdk/test_store_branch.py b/tests/unit/sdk/test_store_branch.py
new file mode 100644
index 00000000..aead55a2
--- /dev/null
+++ b/tests/unit/sdk/test_store_branch.py
@@ -0,0 +1,95 @@
+import pytest
+
+from infrahub_sdk.client import InfrahubClient
+from infrahub_sdk.exceptions import NodeNotFoundError
+from infrahub_sdk.node import InfrahubNode
+from infrahub_sdk.store import NodeStoreBranch
+
+
+def test_node_store_set(client: InfrahubClient, schema_with_hfid):
+ data = {
+ "name": {"value": "JFK1"},
+ "description": {"value": "JFK Airport"},
+ "type": {"value": "SITE"},
+ }
+ node = InfrahubNode(client=client, schema=schema_with_hfid["location"], data=data)
+
+ store = NodeStoreBranch(name="mybranch")
+ store.set(key="mykey", node=node)
+
+ assert node._internal_id in store._objs
+ assert "mykey" in store._keys
+ assert store._keys["mykey"] == node._internal_id
+
+
+def test_node_store_set_no_hfid(client: InfrahubClient, location_schema):
+ data = {
+ "name": {"value": "JFK1"},
+ "description": {"value": "JFK Airport"},
+ "type": {"value": "SITE"},
+ }
+ node = InfrahubNode(client=client, schema=location_schema, data=data)
+
+ store = NodeStoreBranch(name="mybranch")
+
+ store.set(key="mykey", node=node)
+
+ assert node._internal_id in store._objs
+ assert "mykey" in store._keys
+ assert store._keys["mykey"] == node._internal_id
+
+
+def test_node_store_get(client: InfrahubClient, location_schema):
+ data = {
+ "id": "54f3108c-1f21-44c4-93cf-ec5737587b48",
+ "name": {"value": "JFK1"},
+ "description": {"value": "JFK Airport"},
+ "type": {"value": "SITE"},
+ }
+ node = InfrahubNode(client=client, schema=location_schema, data=data)
+
+ store = NodeStoreBranch(name="mybranch")
+
+ store.set(key="mykey", node=node)
+
+ assert store.get(key=node._internal_id).id == node.id
+ assert store.get(kind="BuiltinLocation", key="mykey").id == node.id
+ assert store.get(key="mykey").id == node.id
+
+ assert store.get(kind="BuiltinLocation", key="anotherkey", raise_when_missing=False) is None
+ assert store.get(key="anotherkey", raise_when_missing=False) is None
+
+ with pytest.raises(NodeNotFoundError):
+ store.get(kind="BuiltinLocation", key="anotherkey")
+ with pytest.raises(NodeNotFoundError):
+ store.get(key="anotherkey")
+
+
+def test_node_store_get_with_hfid(client: InfrahubClient, schema_with_hfid):
+ data = {
+ "id": "54f3108c-1f21-44c4-93cf-ec5737587b48",
+ "name": {"value": "JFK1"},
+ "description": {"value": "JFK Airport"},
+ "type": {"value": "SITE"},
+ }
+ node = InfrahubNode(client=client, schema=schema_with_hfid["location"], data=data)
+
+ store = NodeStoreBranch(name="mybranch")
+
+ store.set(key="mykey", node=node)
+
+ assert store.get(key=node._internal_id).id == node.id
+
+ assert store.get(kind="BuiltinLocation", key="mykey").id == node.id
+ assert store.get(key="mykey").id == node.id
+ assert store.get(key="BuiltinLocation__JFK1").id == node.id
+ assert store.get(kind="BuiltinLocation", key="JFK1").id == node.id
+ assert store.get(key="54f3108c-1f21-44c4-93cf-ec5737587b48").id == node.id
+
+ assert store.get(kind="BuiltinLocation", key="anotherkey", raise_when_missing=False) is None
+ assert store.get(key="anotherkey", raise_when_missing=False) is None
+
+ with pytest.raises(NodeNotFoundError):
+ store.get(kind="BuiltinLocation", key="anotherkey")
+ with pytest.raises(NodeNotFoundError):
+ store.get(key="anotherkey")
diff --git a/tests/unit/sdk/test_template.py b/tests/unit/sdk/test_template.py
new file mode 100644
index 00000000..b8854e54
--- /dev/null
+++ b/tests/unit/sdk/test_template.py
@@ -0,0 +1,312 @@
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any
+
+import pytest
+from rich.syntax import Syntax
+from rich.traceback import Frame
+
+from infrahub_sdk.template import Jinja2Template
+from infrahub_sdk.template.exceptions import (
+ JinjaTemplateError,
+ JinjaTemplateNotFoundError,
+ JinjaTemplateOperationViolationError,
+ JinjaTemplateSyntaxError,
+ JinjaTemplateUndefinedError,
+)
+from infrahub_sdk.template.filters import (
+ BUILTIN_FILTERS,
+ NETUTILS_FILTERS,
+ FilterDefinition,
+)
+from infrahub_sdk.template.models import UndefinedJinja2Error
+
+CURRENT_DIRECTORY = Path(__file__).parent
+TEMPLATE_DIRECTORY = CURRENT_DIRECTORY / "test_data/templates"
+
+
+@dataclass
+class JinjaTestCase:
+ name: str
+ template: str
+ variables: dict[str, Any]
+ expected: str
+ expected_variables: list[str] = field(default_factory=list)
+
+
+@dataclass
+class JinjaTestCaseFailing:
+ name: str
+ template: str
+ variables: dict[str, Any]
+ error: JinjaTemplateError
+
+
+SUCCESSFUL_STRING_TEST_CASES = [
+ JinjaTestCase(
+ name="hello-world",
+ template="Hello {{ name }}",
+ variables={"name": "Infrahub"},
+ expected="Hello Infrahub",
+ expected_variables=["name"],
+ ),
+ JinjaTestCase(
+ name="hello-if-defined",
+ template="Hello {% if name is undefined %}stranger{% else %}{{name}}{% endif %}",
+ variables={"name": "OpsMill"},
+ expected="Hello OpsMill",
+ expected_variables=["name"],
+ ),
+ JinjaTestCase(
+ name="hello-if-undefined",
+ template="Hello {% if name is undefined %}stranger{% else %}{{name}}{% endif %}",
+ variables={},
+ expected="Hello stranger",
+ expected_variables=["name"],
+ ),
+ JinjaTestCase(
+ name="netutils-ip-addition",
+ template="IP={{ ip_address|ip_addition(200) }}",
+ variables={"ip_address": "192.168.12.15"},
+ expected="IP=192.168.12.215",
+ expected_variables=["ip_address"],
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "test_case",
+ [pytest.param(tc, id=tc.name) for tc in SUCCESSFUL_STRING_TEST_CASES],
+)
+async def test_render_string(test_case: JinjaTestCase) -> None:
+ jinja = Jinja2Template(template=test_case.template)
+ assert test_case.expected == await jinja.render(variables=test_case.variables)
+ assert test_case.expected_variables == jinja.get_variables()
+
+
+SUCCESSFUL_FILE_TEST_CASES = [
+ JinjaTestCase(
+ name="hello-world",
+ template="hello-world.j2",
+ variables={"name": "Infrahub"},
+ expected="Hello Infrahub",
+ expected_variables=["name"],
+ ),
+ JinjaTestCase(
+ name="netutils-convert-address",
+ template="ip_report.j2",
+ variables={"address": "192.168.18.40/255.255.255.0"},
+ expected="IP Address: 192.168.18.40/24",
+ expected_variables=["address"],
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "test_case",
+ [pytest.param(tc, id=tc.name) for tc in SUCCESSFUL_FILE_TEST_CASES],
+)
+async def test_render_template_from_file(test_case: JinjaTestCase) -> None:
+ jinja = Jinja2Template(template=Path(test_case.template), template_directory=TEMPLATE_DIRECTORY)
+ assert test_case.expected == await jinja.render(variables=test_case.variables)
+ assert test_case.expected_variables == jinja.get_variables()
+ assert jinja.get_template()
+
+
+FAILING_STRING_TEST_CASES = [
+ JinjaTestCaseFailing(
+ name="missing-closing-end-if",
+ template="Hello {% if name is undefined %}stranger{% else %}{{name}}{% endif",
+ variables={},
+ error=JinjaTemplateSyntaxError(
+ message="unexpected end of template, expected 'end of statement block'.",
+ lineno=1,
+ ),
+ ),
+ JinjaTestCaseFailing(
+ name="fail-on-line-2",
+ template="Hello \n{{ name }",
+ variables={},
+ error=JinjaTemplateSyntaxError(
+ message="unexpected '}'",
+ lineno=2,
+ ),
+ ),
+ JinjaTestCaseFailing(
+ name="nested-undefined",
+ template="Hello {{ person.firstname }}",
+ variables={"person": {"lastname": "Rogers"}},
+ error=JinjaTemplateUndefinedError(
+ message="'dict object' has no attribute 'firstname'",
+ errors=[
+ UndefinedJinja2Error(
+ frame=Frame(filename="", lineno=1, name="top-level template code"),
+ syntax=Syntax(code="", lexer="text"),
+ )
+ ],
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "test_case",
+ [pytest.param(tc, id=tc.name) for tc in FAILING_STRING_TEST_CASES],
+)
+async def test_render_string_errors(test_case: JinjaTestCaseFailing) -> None:
+ jinja = Jinja2Template(template=test_case.template, template_directory=TEMPLATE_DIRECTORY)
+ with pytest.raises(test_case.error.__class__) as exc:
+ await jinja.render(variables=test_case.variables)
+
+ _compare_errors(expected=test_case.error, received=exc.value)
+
+
+FAILING_FILE_TEST_CASES = [
+ JinjaTestCaseFailing(
+ name="missing-initial-file",
+ template="missing.html",
+ variables={},
+ error=JinjaTemplateNotFoundError(
+ message=f"'missing.html' not found in search path: '{TEMPLATE_DIRECTORY}'",
+ filename="missing.html",
+ ),
+ ),
+ JinjaTestCaseFailing(
+ name="broken-template",
+ template="index.html",
+ variables={},
+ error=JinjaTemplateSyntaxError(
+ message="unexpected '}'",
+ filename="/index.html",
+ lineno=6,
+ ),
+ ),
+ JinjaTestCaseFailing(
+ name="secondary-import-broken",
+ template="main.j2",
+ variables={},
+ error=JinjaTemplateSyntaxError(
+ message="unexpected '}'",
+ filename="/broken_on_line6.j2",
+ lineno=6,
+ ),
+ ),
+ JinjaTestCaseFailing(
+ name="secondary-import-missing",
+ template="imports-missing-file.html",
+ variables={},
+ error=JinjaTemplateNotFoundError(
+ message=f"'i-do-not-exist.html' not found in search path: '{TEMPLATE_DIRECTORY}'",
+ filename="i-do-not-exist.html",
+ base_template="imports-missing-file.html",
+ ),
+ ),
+ JinjaTestCaseFailing(
+ name="invalid-variable-input",
+ template="report.html",
+ variables={
+ "servers": [
+ {"name": "server1", "ip": {"primary": "172.18.12.1"}},
+ {"name": "server1"},
+ ]
+ },
+ error=JinjaTemplateUndefinedError(
+ message="'dict object' has no attribute 'ip'",
+ errors=[
+ UndefinedJinja2Error(
+ frame=Frame(
+ filename=f"{TEMPLATE_DIRECTORY}/report.html",
+ lineno=5,
+ name="top-level template code",
+ ),
+ syntax=Syntax(
+                        code="\n\n\n{% for server in servers %}\n    - {{server.name}}: {{ server.ip.primary }}\n{% endfor %}\n\n\n\n\n\n",  # noqa: E501
+ lexer="",
+ ),
+ )
+ ],
+ ),
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "test_case",
+ [pytest.param(tc, id=tc.name) for tc in FAILING_FILE_TEST_CASES],
+)
+async def test_manage_file_based_errors(test_case: JinjaTestCaseFailing) -> None:
+ jinja = Jinja2Template(template=Path(test_case.template), template_directory=TEMPLATE_DIRECTORY)
+ with pytest.raises(test_case.error.__class__) as exc:
+ await jinja.render(variables=test_case.variables)
+
+ _compare_errors(expected=test_case.error, received=exc.value)
+
+
+async def test_manage_unhandled_error() -> None:
+ jinja = Jinja2Template(
+ template="Hello {{ number | divide_by_zero }}",
+ filters={"divide_by_zero": _divide_by_zero},
+ )
+ with pytest.raises(JinjaTemplateError) as exc:
+ await jinja.render(variables={"number": 1})
+
+ assert exc.value.message == "division by zero"
+
+
+async def test_validate_filter() -> None:
+ jinja = Jinja2Template(template="{{ network | get_all_host }}")
+ jinja.validate(restricted=False)
+ with pytest.raises(JinjaTemplateOperationViolationError) as exc:
+ jinja.validate(restricted=True)
+
+ assert exc.value.message == "The 'get_all_host' filter isn't allowed to be used"
+
+
+async def test_validate_operation() -> None:
+ jinja = Jinja2Template(template="Hello {% include 'very-forbidden.j2' %}")
+ with pytest.raises(JinjaTemplateOperationViolationError) as exc:
+ jinja.validate(restricted=True)
+
+ assert (
+ exc.value.message == "These operations are forbidden for string based templates: ['Call', 'Import', 'Include']"
+ )
+
+
+@pytest.mark.parametrize(
+ "filter_collection",
+ [
+ pytest.param(BUILTIN_FILTERS, id="builtin-filters"),
+ pytest.param(NETUTILS_FILTERS, id="netutils-filters"),
+ ],
+)
+def test_validate_filter_sorting(filter_collection: list[FilterDefinition]) -> None:
+ """Test to validate that the filter names are in alphabetical order, for the docs and general sanity."""
+ names = [filter_definition.name for filter_definition in filter_collection]
+ assert names == sorted(names)
+
+
+def _divide_by_zero(number: int) -> float:
+ return number / 0
+
+
+def _compare_errors(expected: JinjaTemplateError, received: JinjaTemplateError) -> None:
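+    """Compare the expected and received template errors field by field, depending on the concrete error class."""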
+ if isinstance(expected, JinjaTemplateNotFoundError) and isinstance(received, JinjaTemplateNotFoundError):
+ assert expected.message == received.message
+ assert expected.filename == received.filename
+ assert expected.base_template == received.base_template
+ elif isinstance(expected, JinjaTemplateSyntaxError) and isinstance(received, JinjaTemplateSyntaxError):
+ assert expected.message == received.message
+ assert expected.filename == received.filename
+ assert expected.lineno == received.lineno
+ elif isinstance(expected, JinjaTemplateUndefinedError) and isinstance(received, JinjaTemplateUndefinedError):
+ assert expected.message == received.message
+ assert len(expected.errors) == len(received.errors)
+ for i in range(len(expected.errors)):
+ assert expected.errors[i].frame.name == received.errors[i].frame.name
+ assert expected.errors[i].frame.filename == received.errors[i].frame.filename
+ assert expected.errors[i].frame.lineno == received.errors[i].frame.lineno
+ assert expected.errors[i].syntax.code == received.errors[i].syntax.code
+ assert expected.errors[i].syntax.lexer.__class__ == received.errors[i].syntax.lexer.__class__
+
+ else:
+ raise Exception("This should never happen")
diff --git a/tests/unit/sdk/test_utils.py b/tests/unit/sdk/test_utils.py
index 7f628c62..88c25644 100644
--- a/tests/unit/sdk/test_utils.py
+++ b/tests/unit/sdk/test_utils.py
@@ -18,6 +18,7 @@
dict_hash,
duplicates,
extract_fields,
+ generate_short_id,
get_flat_value,
is_valid_url,
is_valid_uuid,
@@ -26,6 +27,12 @@
)
+def test_generate_short_id():
+ assert len(generate_short_id()) == 22
+ assert isinstance(generate_short_id(), str)
+ assert generate_short_id() != generate_short_id()
+
+
def test_is_valid_uuid():
assert is_valid_uuid(uuid.uuid4()) is True
assert is_valid_uuid(uuid.UUID("ba0aecd9-546a-4d77-9187-23e17a20633e")) is True