Skip to content

Commit 55ad3bc

Browse files
authored
Merge pull request #6404 from opsmill/pog-stable-to-develop-20250505
Merge stable into develop with resolved merge conflicts
2 parents 12481ec + d9e18db commit 55ad3bc

File tree

71 files changed

+768
-216
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

71 files changed

+768
-216
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,9 @@ development/docker-compose.dev-override.yml
1717
**/.idea/**
1818
dist/*
1919

20+
# As we might run collect command from this repo
21+
support_logs_*
22+
2023
# Direnv files (https://direnv.net/)
2124
.direnv/
2225
.envrc

CHANGELOG.md

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,22 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang
1111

1212
<!-- towncrier release notes start -->
1313

14+
## [Infrahub - v1.2.8](https://github.com/opsmill/infrahub/tree/infrahub-v1.2.8) - 2025-05-01
15+
16+
### Added
17+
18+
- Added support for "convert_query_response" for Python transforms. The feature works the same way as with Generators. Note any non default branch will need to be rebased after this upgrade. ([#6383](https://github.com/opsmill/infrahub/issues/6383))
19+
- Enable HCL syntax highlighting for artifacts
20+
21+
### Fixed
22+
23+
- Improve performance when retrieving nodes that have thousands of relationships
24+
- Improve performance of git credential helper
25+
26+
### Housekeeping
27+
28+
- Background performance improvements due to Prefect 3.3.7 upgrade
29+
1430
## [Infrahub - v1.2.7](https://github.com/opsmill/infrahub/tree/infrahub-v1.2.7) - 2025-04-28
1531

1632
### Security

backend/infrahub/api/transformation.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,7 @@ async def transform_python(
8888
branch=branch_params.branch.name,
8989
transform_location=f"{transform.file_path.value}::{transform.class_name.value}",
9090
timeout=transform.timeout.value,
91+
convert_query_response=transform.convert_query_response.value or False,
9192
data=data,
9293
)
9394

backend/infrahub/artifacts/models.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,10 @@ class CheckArtifactCreate(BaseModel):
1212
content_type: str = Field(..., description="Content type of the artifact")
1313
transform_type: str = Field(..., description="The type of transform associated with this artifact")
1414
transform_location: str = Field(..., description="The transforms location within the repository")
15+
convert_query_response: bool = Field(
16+
default=False,
17+
description="Indicate if the query response should be converted to InfrahubNode objects for Python transforms",
18+
)
1519
repository_id: str = Field(..., description="The unique ID of the Repository")
1620
repository_name: str = Field(..., description="The name of the Repository")
1721
repository_kind: str = Field(..., description="The kind of the Repository")

backend/infrahub/cli/db.py

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
from infrahub.core import registry
2424
from infrahub.core.graph import GRAPH_VERSION
2525
from infrahub.core.graph.constraints import ConstraintManagerBase, ConstraintManagerMemgraph, ConstraintManagerNeo4j
26-
from infrahub.core.graph.index import node_indexes, rel_indexes
26+
from infrahub.core.graph.index import attr_value_index, node_indexes, rel_indexes
2727
from infrahub.core.graph.schema import (
2828
GRAPH_SCHEMA,
2929
GraphAttributeProperties,
@@ -48,6 +48,8 @@
4848
from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
4949
from infrahub.core.validators.tasks import schema_validate_migrations
5050
from infrahub.database import DatabaseType
51+
from infrahub.database.memgraph import IndexManagerMemgraph
52+
from infrahub.database.neo4j import IndexManagerNeo4j
5153
from infrahub.log import get_logger
5254
from infrahub.services import InfrahubServices
5355
from infrahub.services.adapters.message_bus.local import BusSimulator
@@ -59,6 +61,7 @@
5961
if TYPE_CHECKING:
6062
from infrahub.cli.context import CliContext
6163
from infrahub.database import InfrahubDatabase
64+
from infrahub.database.index import IndexManagerBase
6265

6366
app = AsyncTyper()
6467
app.add_typer(patch_app, name="patch")
@@ -249,14 +252,20 @@ async def index(
249252

250253
context: CliContext = ctx.obj
251254
dbdriver = await context.init_db(retry=1)
252-
dbdriver.manager.index.init(nodes=node_indexes, rels=rel_indexes)
255+
if dbdriver.db_type is DatabaseType.MEMGRAPH:
256+
index_manager: IndexManagerBase = IndexManagerMemgraph(db=dbdriver)
257+
index_manager = IndexManagerNeo4j(db=dbdriver)
258+
259+
if config.SETTINGS.experimental_features.value_db_index:
260+
node_indexes.append(attr_value_index)
261+
index_manager.init(nodes=node_indexes, rels=rel_indexes)
253262

254263
if action == IndexAction.ADD:
255-
await dbdriver.manager.index.add()
264+
await index_manager.add()
256265
elif action == IndexAction.DROP:
257-
await dbdriver.manager.index.drop()
266+
await index_manager.drop()
258267

259-
indexes = await dbdriver.manager.index.list()
268+
indexes = await index_manager.list()
260269

261270
console = Console()
262271

@@ -412,7 +421,7 @@ async def update_core_schema(
412421
update_db=True,
413422
)
414423
default_branch.update_schema_hash()
415-
rprint("The Core Schema has been updated")
424+
rprint("The Core Schema has been updated, make sure to rebase any open branches after the upgrade")
416425
if debug:
417426
rprint(f"New schema hash: {default_branch.active_schema_hash.main}")
418427
await default_branch.save(db=dbt)

backend/infrahub/computed_attribute/tasks.py

Lines changed: 34 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,7 @@ async def process_transform(
113113
location=f"{transform.file_path.value}::{transform.class_name.value}",
114114
data=data,
115115
client=service.client,
116+
convert_query_response=transform.convert_query_response.value,
116117
) # type: ignore[misc]
117118

118119
await service.client.execute_graphql(
@@ -301,15 +302,24 @@ async def computed_attribute_setup_jinja2(
301302

302303
triggers = await gather_trigger_computed_attribute_jinja2()
303304

304-
for trigger in triggers:
305-
if event_name != BranchDeletedEvent.event_name and trigger.branch == branch_name:
305+
# Since we can have multiple triggers per NodeKind
306+
# we need to extract the list of unique nodes that should be processed
307+
# also
308+
# Because the automation in Prefect doesn't capture all information about the computed attribute
309+
# we can't tell right now if a given computed attribute has changed and need to be updated
310+
unique_nodes: set[tuple[str, str, str]] = {
311+
(trigger.branch, trigger.computed_attribute.kind, trigger.computed_attribute.attribute.name)
312+
for trigger in triggers
313+
}
314+
for branch, kind, attribute_name in unique_nodes:
315+
if event_name != BranchDeletedEvent.event_name and branch == branch_name:
306316
await service.workflow.submit_workflow(
307317
workflow=TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
308318
context=context,
309319
parameters={
310-
"branch_name": trigger.branch,
311-
"computed_attribute_name": trigger.computed_attribute.attribute.name,
312-
"computed_attribute_kind": trigger.computed_attribute.kind,
320+
"branch_name": branch,
321+
"computed_attribute_name": attribute_name,
322+
"computed_attribute_kind": kind,
313323
},
314324
)
315325

@@ -319,6 +329,7 @@ async def computed_attribute_setup_jinja2(
319329
client=prefect_client,
320330
triggers=triggers,
321331
trigger_type=TriggerType.COMPUTED_ATTR_JINJA2,
332+
force_update=False,
322333
) # type: ignore[misc]
323334

324335
log.info(f"{len(triggers)} Computed Attribute for Jinja2 automation configuration completed")
@@ -346,18 +357,29 @@ async def computed_attribute_setup_python(
346357

347358
triggers_python, triggers_python_query = await gather_trigger_computed_attribute_python(db=db)
348359

349-
for trigger in triggers_python:
350-
if event_name != BranchDeletedEvent.event_name and trigger.branch == branch_name:
351-
log.info(
352-
f"Triggering update for {trigger.computed_attribute.computed_attribute.attribute.name} on {branch_name}"
353-
)
360+
# Since we can have multiple triggers per NodeKind
361+
# we need to extract the list of unique nodes that should be processed
362+
# also
363+
# Because the automation in Prefect doesn't capture all information about the computed attribute
364+
# we can't tell right now if a given computed attribute has changed and need to be updated
365+
unique_nodes: set[tuple[str, str, str]] = {
366+
(
367+
trigger.branch,
368+
trigger.computed_attribute.computed_attribute.kind,
369+
trigger.computed_attribute.computed_attribute.attribute.name,
370+
)
371+
for trigger in triggers_python
372+
}
373+
for branch, kind, attribute_name in unique_nodes:
374+
if event_name != BranchDeletedEvent.event_name and branch == branch_name:
375+
log.info(f"Triggering update for {kind}.{attribute_name} on {branch}")
354376
await service.workflow.submit_workflow(
355377
workflow=TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
356378
context=context,
357379
parameters={
358380
"branch_name": branch_name,
359-
"computed_attribute_name": trigger.computed_attribute.computed_attribute.attribute.name,
360-
"computed_attribute_kind": trigger.computed_attribute.computed_attribute.kind,
381+
"computed_attribute_name": attribute_name,
382+
"computed_attribute_kind": kind,
361383
},
362384
)
363385

backend/infrahub/core/initialization.py

Lines changed: 23 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
import importlib
2+
from typing import TYPE_CHECKING
23
from uuid import uuid4
34

45
from infrahub import config, lock
6+
from infrahub.constants.database import DatabaseType
57
from infrahub.core import registry
68
from infrahub.core.branch import Branch
79
from infrahub.core.constants import (
@@ -13,6 +15,7 @@
1315
PermissionDecision,
1416
)
1517
from infrahub.core.graph import GRAPH_VERSION
18+
from infrahub.core.graph.index import attr_value_index, node_indexes, rel_indexes
1619
from infrahub.core.manager import NodeManager
1720
from infrahub.core.node import Node
1821
from infrahub.core.node.ipam import BuiltinIPPrefix
@@ -25,13 +28,18 @@
2528
from infrahub.core.schema import SchemaRoot, core_models, internal_schema
2629
from infrahub.core.schema.manager import SchemaManager
2730
from infrahub.database import InfrahubDatabase
31+
from infrahub.database.memgraph import IndexManagerMemgraph
32+
from infrahub.database.neo4j import IndexManagerNeo4j
2833
from infrahub.exceptions import DatabaseError
2934
from infrahub.graphql.manager import GraphQLSchemaManager
3035
from infrahub.log import get_logger
3136
from infrahub.menu.utils import create_default_menu
3237
from infrahub.permissions import PermissionBackend
3338
from infrahub.storage import InfrahubObjectStorage
3439

40+
if TYPE_CHECKING:
41+
from infrahub.database.index import IndexManagerBase
42+
3543
log = get_logger()
3644

3745

@@ -115,7 +123,19 @@ async def initialize_registry(db: InfrahubDatabase, initialize: bool = False) ->
115123
registry.permission_backends = initialize_permission_backends()
116124

117125

118-
async def initialization(db: InfrahubDatabase) -> None:
126+
async def add_indexes(db: InfrahubDatabase) -> None:
127+
if db.db_type is DatabaseType.MEMGRAPH:
128+
index_manager: IndexManagerBase = IndexManagerMemgraph(db=db)
129+
index_manager = IndexManagerNeo4j(db=db)
130+
131+
if config.SETTINGS.experimental_features.value_db_index:
132+
node_indexes.append(attr_value_index)
133+
index_manager.init(nodes=node_indexes, rels=rel_indexes)
134+
log.debug("Loading database indexes ..")
135+
await index_manager.add()
136+
137+
138+
async def initialization(db: InfrahubDatabase, add_database_indexes: bool = False) -> None:
119139
if config.SETTINGS.database.db_type == config.DatabaseType.MEMGRAPH:
120140
session = await db.session()
121141
await session.run(query="SET DATABASE SETTING 'log.level' TO 'INFO'")
@@ -129,12 +149,8 @@ async def initialization(db: InfrahubDatabase) -> None:
129149
log.debug("Checking Root Node")
130150
await initialize_registry(db=db, initialize=True)
131151

132-
# Add Indexes to the database
133-
if db.manager.index.initialized:
134-
log.debug("Loading database indexes ..")
135-
await db.manager.index.add()
136-
else:
137-
log.warning("The database index manager hasn't been initialized.")
152+
if add_database_indexes:
153+
await add_indexes(db=db)
138154

139155
# ---------------------------------------------------
140156
# Load all schema in the database into the registry

backend/infrahub/core/manager.py

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1229,20 +1229,31 @@ async def _enrich_node_dicts_with_relationships(
12291229
if not prefetch_relationships and not fields:
12301230
return
12311231
cardinality_one_identifiers_by_kind: dict[str, dict[str, RelationshipDirection]] | None = None
1232-
all_identifiers: list[str] | None = None
1232+
outbound_identifiers: set[str] | None = None
1233+
inbound_identifiers: set[str] | None = None
1234+
bidirectional_identifiers: set[str] | None = None
12331235
if not prefetch_relationships:
12341236
cardinality_one_identifiers_by_kind = _get_cardinality_one_identifiers_by_kind(
12351237
nodes=nodes_by_id.values(), fields=fields or {}
12361238
)
1237-
all_identifiers_set: set[str] = set()
1239+
outbound_identifiers = set()
1240+
inbound_identifiers = set()
1241+
bidirectional_identifiers = set()
12381242
for identifier_direction_map in cardinality_one_identifiers_by_kind.values():
1239-
all_identifiers_set.update(identifier_direction_map.keys())
1240-
all_identifiers = list(all_identifiers_set)
1243+
for identifier, direction in identifier_direction_map.items():
1244+
if direction is RelationshipDirection.OUTBOUND:
1245+
outbound_identifiers.add(identifier)
1246+
elif direction is RelationshipDirection.INBOUND:
1247+
inbound_identifiers.add(identifier)
1248+
elif direction is RelationshipDirection.BIDIR:
1249+
bidirectional_identifiers.add(identifier)
12411250

12421251
query = await NodeListGetRelationshipsQuery.init(
12431252
db=db,
12441253
ids=list(nodes_by_id.keys()),
1245-
relationship_identifiers=all_identifiers,
1254+
outbound_identifiers=None if outbound_identifiers is None else list(outbound_identifiers),
1255+
inbound_identifiers=None if inbound_identifiers is None else list(inbound_identifiers),
1256+
bidirectional_identifiers=None if bidirectional_identifiers is None else list(bidirectional_identifiers),
12461257
branch=branch,
12471258
at=at,
12481259
branch_agnostic=branch_agnostic,

backend/infrahub/core/migrations/graph/m014_remove_index_attr_value.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from infrahub.core.query import Query # noqa: TC001
88
from infrahub.database import DatabaseType
99
from infrahub.database.index import IndexItem
10+
from infrahub.database.neo4j import IndexManagerNeo4j
1011

1112
from ..shared import GraphMigration
1213

@@ -30,8 +31,9 @@ async def execute(self, db: InfrahubDatabase) -> MigrationResult:
3031
return result
3132

3233
try:
33-
db.manager.index.init(nodes=[INDEX_TO_DELETE], rels=[])
34-
await db.manager.index.drop()
34+
index_manager = IndexManagerNeo4j(db=db)
35+
index_manager.init(nodes=[INDEX_TO_DELETE], rels=[])
36+
await index_manager.drop()
3537
except Exception as exc:
3638
result.errors.append(str(exc))
3739
return result

backend/infrahub/core/protocols.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -478,6 +478,7 @@ class CoreTransformJinja2(CoreTransformation):
478478
class CoreTransformPython(CoreTransformation):
479479
file_path: String
480480
class_name: String
481+
convert_query_response: BooleanOptional
481482

482483

483484
class CoreUserValidator(CoreValidator):

0 commit comments

Comments
 (0)