Commit 5929b00

Adding model for path.
1 parent 7cf36eb commit 5929b00

4 files changed: +143 -182 lines changed


stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 9 additions & 5 deletions
@@ -377,7 +377,7 @@ async def get_item(
 
     @staticmethod
     def _return_date(
-        interval: Optional[Union[DateTimeType, str]]
+        interval: Optional[Union[DateTimeType, str]],
     ) -> Dict[str, Optional[str]]:
         """
         Convert a date interval.
@@ -738,13 +738,15 @@ async def merge_patch_item(
             stac_types.Item: The patched item object.

         """
+        base_url = str(kwargs["request"].base_url)
+
         item = await self.database.merge_patch_item(
             collection_id=collection_id,
             item_id=item_id,
             item=item,
-            base_url=str(kwargs["request"].base_url),
+            base_url=base_url,
         )
-        return ItemSerializer.db_to_stac(item, base_url=str(kwargs["request"].base_url))
+        return ItemSerializer.db_to_stac(item, base_url=base_url)

     @overrides
     async def json_patch_item(
@@ -766,13 +768,15 @@ async def json_patch_item(
             stac_types.Item: The patched item object.

         """
+        base_url = str(kwargs["request"].base_url)
+
         item = await self.database.json_patch_item(
             collection_id=collection_id,
             item_id=item_id,
-            base_url=str(kwargs["request"].base_url),
+            base_url=base_url,
             operations=operations,
         )
-        return ItemSerializer.db_to_stac(item, base_url=str(kwargs["request"].base_url))
+        return ItemSerializer.db_to_stac(item, base_url=base_url)

     @overrides
     async def delete_item(
stac_fastapi/core/stac_fastapi/core/models/patch.py

Lines changed: 35 additions & 0 deletions

@@ -0,0 +1,35 @@
+"""patch helpers."""
+
+from typing import Optional
+
+from pydantic import BaseModel, computed_field
+
+
+class ElasticPath(BaseModel):
+    """Converts a JSON path to an Elasticsearch path.
+
+    Args:
+        path (str): JSON path to be converted.
+
+    """
+
+    path: str
+    nest: Optional[str] = None
+    partition: Optional[str] = None
+    key: Optional[str] = None
+    index: Optional[int] = None
+
+    def __init__(self, *, path: str):
+        self.path = path.lstrip("/").replace("/", ".")
+
+        self.nest, self.partition, self.key = path.rpartition(".")
+
+        if self.key.isdigit():
+            self.index = int(self.key)
+            self.path = f"{self.nest}[{self.index}]"
+            self.nest, self.partition, self.key = self.nest.rpartition(".")
+
+    @computed_field
+    @property
+    def location(self):
+        return self.nest + self.partition + self.key
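For reference, here is a minimal runnable sketch (not part of this commit) of the behaviour the model appears to be aiming for. It makes two assumptions that differ from the diff above: the pydantic fields are initialised via super().__init__() before any attribute is assigned, and the rpartition runs on the converted dotted path (self.path) rather than on the raw slash-delimited input. The class name, return annotation, and example path are illustrative only.

from typing import Optional

from pydantic import BaseModel, computed_field


class ElasticPathSketch(BaseModel):
    """Decompose a JSON Patch path into Elasticsearch-friendly parts (illustrative)."""

    path: str
    nest: Optional[str] = None
    partition: Optional[str] = None
    key: Optional[str] = None
    index: Optional[int] = None

    def __init__(self, *, path: str):
        # Initialise pydantic fields first, then derive the remaining parts
        # from the dot-separated form of the path.
        super().__init__(path=path.lstrip("/").replace("/", "."))

        self.nest, self.partition, self.key = self.path.rpartition(".")

        if self.key.isdigit():
            # Trailing numeric segment: treat it as an array index.
            self.index = int(self.key)
            self.path = f"{self.nest}[{self.index}]"
            self.nest, self.partition, self.key = self.nest.rpartition(".")

    @computed_field
    @property
    def location(self) -> str:
        """Container of the key, e.g. 'properties.instruments'."""
        return self.nest + self.partition + self.key


# "/properties/instruments/1" -> path="properties.instruments[1]",
# nest="properties", key="instruments", index=1, location="properties.instruments"
print(ElasticPathSketch(path="/properties/instruments/1"))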

stac_fastapi/core/stac_fastapi/core/utilities.py

Lines changed: 39 additions & 71 deletions
@@ -4,9 +4,9 @@
 such as converting bounding boxes to polygon representations.
 """

-import json
 from typing import Any, Dict, List, Optional, Set, Union

+from stac_fastapi.core.models.patch import ElasticPath
 from stac_fastapi.types.stac import Item, PatchAddReplaceTest, PatchRemove

 MAX_LIMIT = 10000
@@ -45,9 +45,7 @@ def filter_fields( # noqa: C901
         return item

     # Build a shallow copy of included fields on an item, or a sub-tree of an item
-    def include_fields(
-        source: Dict[str, Any], fields: Optional[Set[str]]
-    ) -> Dict[str, Any]:
+    def include_fields(source: Dict[str, Any], fields: Optional[Set[str]]) -> Dict[str, Any]:
         if not fields:
             return source

@@ -60,9 +58,7 @@ def include_fields(
                     # The root of this key path on the item is a dict, and the
                     # key path indicates a sub-key to be included. Walk the dict
                     # from the root key and get the full nested value to include.
-                    value = include_fields(
-                        source[key_root], fields={".".join(key_path_parts[1:])}
-                    )
+                    value = include_fields(source[key_root], fields={".".join(key_path_parts[1:])})

                     if isinstance(clean_item.get(key_root), dict):
                         # A previously specified key and sub-keys may have been included
@@ -93,9 +89,7 @@ def exclude_fields(source: Dict[str, Any], fields: Optional[Set[str]]) -> None:
             if key_root in source:
                 if isinstance(source[key_root], dict) and len(key_path_part) > 1:
                     # Walk the nested path of this key to remove the leaf-key
-                    exclude_fields(
-                        source[key_root], fields={".".join(key_path_part[1:])}
-                    )
+                    exclude_fields(source[key_root], fields={".".join(key_path_part[1:])})
                     # If, after removing the leaf-key, the root is now an empty
                     # dict, remove it entirely
                     if not source[key_root]:
@@ -127,11 +121,7 @@ def dict_deep_update(merge_to: Dict[str, Any], merge_from: Dict[str, Any]) -> No
     merge_from values take precedence over existing values in merge_to.
     """
     for k, v in merge_from.items():
-        if (
-            k in merge_to
-            and isinstance(merge_to[k], dict)
-            and isinstance(merge_from[k], dict)
-        ):
+        if k in merge_to and isinstance(merge_to[k], dict) and isinstance(merge_from[k], dict):
             dict_deep_update(merge_to[k], merge_from[k])
         else:
             merge_to[k] = v
@@ -166,43 +156,25 @@ def merge_to_operations(data: Dict) -> List:
     return operations


-def split_json_path(path: str) -> OperationPath:
-    """Split a JSON path into it's components.
+def add_script_checks(source: str, op: str, path: ElasticPath) -> str:
+    """Add Elasticsearch checks to operation.

     Args:
-        path: JSON path.
+        source (str): current source of Elasticsearch script
+        op (str): the operation of script
+        path (Dict): path of variable to run operation on

     Returns:
-        Tuple: nest, partition, key.
+        Dict: update source of Elasticsearch script
     """
-    path = (
-        path[1:].replace("/", ".") if path.startswith("/") else path.replace("/", ".")
-    )
-    nest, partition, key = path.rpartition(".")
+    if path.nest:
+        source += f"if (!ctx._source.containsKey('{path.nest}'))" f"{{Debug.explain('{path.nest} does not exist');}}"

-    try:
-        index = int(key)
-        path = f"{nest}[{index}]"
-        nest, partition, key = nest.rpartition(".")
-
-    except ValueError:
-        index = None
-
-    return {
-        "path": path,
-        "nest": nest,
-        "partition": partition,
-        "key": key,
-        "index": index,
-    }
-
-
-def script_checks(source, op, path) -> Dict:
-    if path["nest"]:
-        source += f"if (!ctx._source.containsKey('{path['nest']}')){{Debug.explain('{path['nest']} does not exist');}}"
-
-    if path["index"] or op != "add":
-        source += f"if (!ctx._source.{path['nest'] + path['partition']}containsKey('{path['key']}')){{Debug.explain('{path['path']} does not exist');}}"
+    if path.index or op != "add":
+        source += (
+            f"if (!ctx._source.{path.nest}.containsKey('{path.key}'))"
+            f"{{Debug.explain('{path.path} does not exist');}}"
+        )

     return source
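For orientation, a hedged worked example (not part of the diff): for a hypothetical "replace" operation whose path decomposes into nest "properties", key "datetime", path "properties.datetime" and no index, the two branches of add_script_checks above would append the following existence guards to the script source.

# Hedged sketch (not from the commit): guards appended by add_script_checks for a
# "replace" whose ElasticPath has nest="properties", key="datetime",
# path="properties.datetime" and index=None.
expected_guards = (
    # from the `if path.nest:` branch
    "if (!ctx._source.containsKey('properties'))"
    "{Debug.explain('properties does not exist');}"
    # from the `if path.index or op != "add":` branch
    "if (!ctx._source.properties.containsKey('datetime'))"
    "{Debug.explain('properties.datetime does not exist');}"
)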

@@ -218,55 +190,51 @@ def operations_to_script(operations: List) -> Dict:
     """
     source = ""
     for operation in operations:
-        op_path = split_json_path(operation.path)
+        op_path = ElasticPath(path=operation.path)

         if hasattr(operation, "from"):
-            from_path = split_json_path(getattr(operation, "from"))
-            source = script_checks(source, operation.op, op_path)
-            if from_path["index"]:
-                from_key = from_path["nest"] + from_path["partition"] + from_path["key"]
+            from_path = ElasticPath(path=(getattr(operation, "from")))
+            source = add_script_checks(source, operation.op, from_path)
+            if from_path.index:
                 source += (
-                    f"if ((ctx._source.{from_key} instanceof ArrayList && ctx._source.{from_key}.size() < {from_path['index']})"
-                    f"|| (!ctx._source.{from_key + from_path['partition']}containsKey('{from_path['index']}'))"
-                    f"{{Debug.explain('{from_path['path']} does not exist');}}"
+                    f"if ((ctx._source.{from_path.location} instanceof ArrayList"
+                    f" && ctx._source.{from_path.location}.size() < {from_path.index})"
+                    f" || (!ctx._source.{from_path.location}.containsKey('{from_path.index}'))"
+                    f"{{Debug.explain('{from_path.path} does not exist');}}"
                 )

-        source = script_checks(source, operation.op, op_path)
+        source = add_script_checks(source, operation.op, op_path)

         if operation.op in ["copy", "move"]:
-            if op_path["index"]:
+            if op_path.index:
                 source += (
-                    f"if (ctx._source.{op_path['nest'] + op_path['partition'] + op_path['key']} instanceof ArrayList)"
-                    f"{{ctx._source.{op_path['nest'] + op_path['partition'] + op_path['key'] + op_path['partition']}add({op_path['index']}, {from_path['path']})}}"
-                    f"else{{ctx._source.{op_path['path']} = {from_path['path']}}}"
+                    f"if (ctx._source.{op_path.location} instanceof ArrayList)"
+                    f"{{ctx._source.{op_path.location}.add({op_path.index}, {from_path.path})}}"
+                    f"else{{ctx._source.{op_path.path} = {from_path.path}}}"
                 )

             else:
-                source += (
-                    f"ctx._source.{op_path['path']} = ctx._source.{from_path['path']};"
-                )
+                source += f"ctx._source.{op_path.path} = ctx._source.{from_path.path};"

         if operation.op in ["remove", "move"]:
             remove_path = from_path if operation.op == "move" else op_path

-            if remove_path["index"]:
-                source += f"ctx._source.{remove_path['nest'] + remove_path['partition'] + remove_path['key'] + remove_path['partition']}remove('{remove_path['index']}');"
+            if remove_path.index:
+                source += f"ctx._source.{remove_path.location}.remove('{remove_path.index}');"

             else:
-                source += f"ctx._source.{remove_path['nest'] + remove_path['partition']}remove('{remove_path['key']}');"
+                source += f"ctx._source.remove('{remove_path.location}');"

         if operation.op in ["add", "replace"]:
             if op_path["index"]:
                 source += (
-                    f"if (ctx._source.{op_path['nest'] + op_path['partition'] + op_path['key']} instanceof ArrayList)"
-                    f"{{ctx._source.{op_path['nest'] + op_path['partition'] + op_path['key'] + op_path['partition']}add({op_path['index']}, {json.dumps(operation.value)})}}"
-                    f"else{{ctx._source.{op_path['path']} = {json.dumps(operation.value)}}}"
+                    f"if (ctx._source.{op_path.location} instanceof ArrayList)"
+                    f"{{ctx._source.{op_path.location}.add({op_path.index}, {operation.json_value})}}"
+                    f"else{{ctx._source.{op_path.path} = {operation.json_value}}}"
                 )

             else:
-                source += (
-                    f"ctx._source.{op_path['path']} = {json.dumps(operation.value)};"
-                )
+                source += f"ctx._source.{op_path.path} = {operation.json_value};"

     return {
         "source": source,
