Skip to content
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

## [Unreleased]

### Changed

- Simplified the `Patch` class and updated patch-script creation, including the creation of nested objects for merge patch operations [#420](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/420)
- Added the environment variable `STAC_ITEM_LIMIT` (with a default value) to SFEOS to limit the number of items and STAC collections returned [#419](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/419)

## [v6.2.0] - 2025-08-27
Expand Down
2 changes: 2 additions & 0 deletions stac_fastapi/core/stac_fastapi/core/base_database_logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ async def json_patch_item(
item_id: str,
operations: List,
base_url: str,
create_nest: bool = False,
refresh: bool = True,
) -> Dict:
"""Patch a item in the database follows RF6902."""
Expand Down Expand Up @@ -94,6 +95,7 @@ async def json_patch_collection(
collection_id: str,
operations: List,
base_url: str,
create_nest: bool = False,
refresh: bool = True,
) -> Dict:
"""Patch a collection in the database follows RF6902."""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -886,6 +886,7 @@ async def merge_patch_item(
item_id=item_id,
operations=operations,
base_url=base_url,
create_nest=True,
refresh=refresh,
)

Expand All @@ -895,6 +896,7 @@ async def json_patch_item(
item_id: str,
operations: List[PatchOperation],
base_url: str,
create_nest: bool = False,
refresh: bool = True,
) -> Item:
"""Database logic for json patching an item following RF6902.
Expand Down Expand Up @@ -929,7 +931,7 @@ async def json_patch_item(
else:
script_operations.append(operation)

script = operations_to_script(script_operations)
script = operations_to_script(script_operations, create_nest=create_nest)

try:
search_response = await self.client.search(
Expand Down Expand Up @@ -1265,6 +1267,7 @@ async def merge_patch_collection(
collection_id=collection_id,
operations=operations,
base_url=base_url,
create_nest=True,
refresh=refresh,
)

Expand All @@ -1273,6 +1276,7 @@ async def json_patch_collection(
collection_id: str,
operations: List[PatchOperation],
base_url: str,
create_nest: bool = False,
refresh: bool = True,
) -> Collection:
"""Database logic for json patching a collection following RF6902.
Expand Down Expand Up @@ -1300,7 +1304,7 @@ async def json_patch_collection(
else:
script_operations.append(operation)

script = operations_to_script(script_operations)
script = operations_to_script(script_operations, create_nest=create_nest)

try:
await self.client.update(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -869,6 +869,7 @@ async def merge_patch_item(
item_id=item_id,
operations=operations,
base_url=base_url,
create_nest=True,
refresh=refresh,
)

Expand All @@ -878,6 +879,7 @@ async def json_patch_item(
item_id: str,
operations: List[PatchOperation],
base_url: str,
create_nest: bool = False,
refresh: bool = True,
) -> Item:
"""Database logic for json patching an item following RF6902.
Expand Down Expand Up @@ -912,7 +914,7 @@ async def json_patch_item(
else:
script_operations.append(operation)

script = operations_to_script(script_operations)
script = operations_to_script(script_operations, create_nest=create_nest)

try:
search_response = await self.client.search(
Expand Down Expand Up @@ -1220,6 +1222,7 @@ async def merge_patch_collection(
collection_id=collection_id,
operations=operations,
base_url=base_url,
create_nest=True,
refresh=refresh,
)

Expand All @@ -1228,6 +1231,7 @@ async def json_patch_collection(
collection_id: str,
operations: List[PatchOperation],
base_url: str,
create_nest: bool = False,
refresh: bool = True,
) -> Collection:
"""Database logic for json patching a collection following RF6902.
Expand Down Expand Up @@ -1255,7 +1259,7 @@ async def json_patch_collection(
else:
script_operations.append(operation)

script = operations_to_script(script_operations)
script = operations_to_script(script_operations, create_nest=create_nest)

try:
await self.client.update(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def merge_to_operations(data: Dict) -> List:
nested_operations = merge_to_operations(value)

for nested_operation in nested_operations:
nested_operation.path = f"{key}.{nested_operation.path}"
nested_operation.path = f"{key}/{nested_operation.path}"
operations.append(nested_operation)

else:
Expand All @@ -90,6 +90,7 @@ def check_commands(
op: str,
path: ElasticPath,
from_path: bool = False,
create_nest: bool = False,
) -> None:
"""Add Elasticsearch checks to operation.

Expand All @@ -101,25 +102,44 @@ def check_commands(

"""
if path.nest:
commands.add(
f"if (!ctx._source.containsKey('{path.nest}'))"
f"{{Debug.explain('{path.nest} does not exist');}}"
)

if path.index or op in ["remove", "replace", "test"] or from_path:
commands.add(
f"if (!ctx._source{path.es_nest}.containsKey('{path.key}'))"
f"{{Debug.explain('{path.key} does not exist in {path.nest}');}}"
)

if from_path and path.index is not None:
commands.add(
f"if ((ctx._source{path.es_location} instanceof ArrayList"
f" && ctx._source{path.es_location}.size() < {path.index})"
f" || (!(ctx._source{path.es_location} instanceof ArrayList)"
f" && !ctx._source{path.es_location}.containsKey('{path.index}')))"
f"{{Debug.explain('{path.path} does not exist');}}"
)
part_nest = ""
for index, path_part in enumerate(path.parts):

# Create nested dictionaries if not present for merge operations
if create_nest and not from_path:
value = "[:]"
for sub_part in reversed(path.parts[index + 1 :]):
value = f"['{sub_part}': {value}]"

commands.add(
f"if (!ctx._source{part_nest}.containsKey('{path_part}'))"
f"{{ctx._source{part_nest}['{path_part}'] = {value};}}"
f"{'' if index == len(path.parts) - 1 else' else '}"
)

else:
commands.add(
f"if (!ctx._source{part_nest}.containsKey('{path_part}'))"
f"{{Debug.explain('{path_part} in {path.path} does not exist');}}"
)

part_nest += f"['{path_part}']"

if from_path or op in ["remove", "replace", "test"]:

if isinstance(path.key, int):
commands.add(
f"if ((ctx._source{path.es_nest} instanceof ArrayList"
f" && ctx._source{path.es_nest}.size() < {abs(path.key)})"
f" || (!(ctx._source{path.es_nest} instanceof ArrayList)"
f" && !ctx._source{path.es_nest}.containsKey('{path.key}')))"
f"{{Debug.explain('{path.key} does not exist in {path.nest}');}}"
)
else:
commands.add(
f"if (!ctx._source{path.es_nest}.containsKey('{path.key}'))"
f"{{Debug.explain('{path.key} does not exist in {path.nest}');}}"
)


def remove_commands(commands: ESCommandSet, path: ElasticPath) -> None:
Expand All @@ -130,15 +150,16 @@ def remove_commands(commands: ESCommandSet, path: ElasticPath) -> None:
path (ElasticPath): Path to value to be removed

"""
if path.index is not None:
commands.add(f"def {path.variable_name};")
if isinstance(path.key, int):
commands.add(
f"def {path.variable_name} = ctx._source{path.es_location}.remove({path.index});"
f"if (ctx._source{path.es_nest} instanceof ArrayList)"
f"{{{path.variable_name} = ctx._source{path.es_nest}.remove({path.es_key});}} else "
)

else:
commands.add(
f"def {path.variable_name} = ctx._source{path.es_nest}.remove('{path.key}');"
)
commands.add(
f"{path.variable_name} = ctx._source{path.es_nest}.remove('{path.key}');"
)


def add_commands(
Expand All @@ -160,21 +181,22 @@ def add_commands(
value = (
from_path.variable_name
if operation.op == "move"
else f"ctx._source.{from_path.es_path}"
else f"ctx._source{from_path.es_path}"
)

else:
value = f"params.{path.param_key}"
params[path.param_key] = operation.value

if path.index is not None:
if isinstance(path.key, int):
commands.add(
f"if (ctx._source{path.es_location} instanceof ArrayList)"
f"{{ctx._source{path.es_location}.{'add' if operation.op in ['add', 'move'] else 'set'}({path.index}, {value})}}"
f"else{{ctx._source.{path.es_path} = {value}}}"
f"if (ctx._source{path.es_nest} instanceof ArrayList)"
f"{{ctx._source{path.es_nest}.{'add' if operation.op in ['add', 'move'] else 'set'}({path.es_key}, {value});}}"
f" else ctx._source{path.es_nest}['{path.es_key}'] = {value};"
)

else:
commands.add(f"ctx._source.{path.es_path} = {value};")
commands.add(f"ctx._source{path.es_path} = {value};")


def test_commands(
Expand All @@ -190,14 +212,23 @@ def test_commands(
value = f"params.{path.param_key}"
params[path.param_key] = operation.value

if isinstance(path.key, int):
commands.add(
f"if (ctx._source{path.es_nest} instanceof ArrayList)"
f"{{if (ctx._source{path.es_nest}[{path.es_key}] != {value})"
f"{{Debug.explain('Test failed `{path.path}`"
f" != ' + ctx._source{path.es_path});}}"
f"}} else "
)

commands.add(
f"if (ctx._source.{path.es_path} != {value})"
f"{{Debug.explain('Test failed `{path.path}` | "
f"{operation.json_value} != ' + ctx._source.{path.es_path});}}"
f"if (ctx._source{path.es_path} != {value})"
f"{{Debug.explain('Test failed `{path.path}`"
f" != ' + ctx._source{path.es_path});}}"
)


def operations_to_script(operations: List) -> Dict:
def operations_to_script(operations: List, create_nest: bool = False) -> Dict:
"""Convert list of operation to painless script.

Args:
Expand All @@ -215,10 +246,16 @@ def operations_to_script(operations: List) -> Dict:
ElasticPath(path=operation.from_) if hasattr(operation, "from_") else None
)

check_commands(commands=commands, op=operation.op, path=path)
check_commands(
commands=commands, op=operation.op, path=path, create_nest=create_nest
)
if from_path is not None:
check_commands(
commands=commands, op=operation.op, path=from_path, from_path=True
commands=commands,
op=operation.op,
path=from_path,
from_path=True,
create_nest=create_nest,
)

if operation.op in ["remove", "move"]:
Expand Down
Loading