@@ -336,7 +336,9 @@ class DatabaseLogic:
     sync_client = SyncElasticsearchSettings().create_client
 
     item_serializer: Type[ItemSerializer] = attr.ib(default=ItemSerializer)
-    collection_serializer: Type[CollectionSerializer] = attr.ib(default=CollectionSerializer)
+    collection_serializer: Type[CollectionSerializer] = attr.ib(
+        default=CollectionSerializer
+    )
 
     extensions: List[str] = attr.ib(default=attr.Factory(list))
 
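The serializer attributes above are plain attrs fields with class-level defaults, so a deployment can inject a different serializer when constructing the class. A minimal sketch of that pattern; `CustomCollectionSerializer` and `DatabaseLogicSketch` are made-up names for illustration, not part of this module:

    # Sketch of the attrs default/override pattern used above.
    from typing import Type

    import attr


    class CollectionSerializer:
        """Stand-in for the real serializer."""


    class CustomCollectionSerializer(CollectionSerializer):
        """Hypothetical project-specific override."""


    @attr.s
    class DatabaseLogicSketch:
        collection_serializer: Type[CollectionSerializer] = attr.ib(
            default=CollectionSerializer
        )


    custom = DatabaseLogicSketch(collection_serializer=CustomCollectionSerializer)
    print(DatabaseLogicSketch().collection_serializer)  # default serializer
    print(custom.collection_serializer)  # injected override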
@@ -370,9 +372,15 @@ class DatabaseLogic:
                 "size": 10000,
             }
         },
-        "sun_elevation_frequency": {"histogram": {"field": "properties.view:sun_elevation", "interval": 5}},
-        "sun_azimuth_frequency": {"histogram": {"field": "properties.view:sun_azimuth", "interval": 5}},
-        "off_nadir_frequency": {"histogram": {"field": "properties.view:off_nadir", "interval": 5}},
+        "sun_elevation_frequency": {
+            "histogram": {"field": "properties.view:sun_elevation", "interval": 5}
+        },
+        "sun_azimuth_frequency": {
+            "histogram": {"field": "properties.view:sun_azimuth", "interval": 5}
+        },
+        "off_nadir_frequency": {
+            "histogram": {"field": "properties.view:off_nadir", "interval": 5}
+        },
         "centroid_geohash_grid_frequency": {
             "geohash_grid": {
                 "field": "properties.proj:centroid",
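These entries feed the aggregation extension: histogram buckets over the view angles and a geohash_grid over the projected centroid. A minimal sketch of running two of these aggregations directly, assuming the elasticsearch-py 8.x async client; the host, index pattern, and geohash precision are placeholders, not values from this file:

    # Sketch: issuing two of the aggregations above against a cluster.
    import asyncio

    from elasticsearch import AsyncElasticsearch


    async def main() -> None:
        client = AsyncElasticsearch(hosts=["http://localhost:9200"])  # placeholder host
        aggs = {
            "sun_elevation_frequency": {
                "histogram": {"field": "properties.view:sun_elevation", "interval": 5}
            },
            "centroid_geohash_grid_frequency": {
                "geohash_grid": {"field": "properties.proj:centroid", "precision": 3}
            },
        }
        resp = await client.search(index="items_*", size=0, aggregations=aggs)
        for bucket in resp["aggregations"]["sun_elevation_frequency"]["buckets"]:
            print(bucket["key"], bucket["doc_count"])
        await client.close()


    asyncio.run(main())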
@@ -469,7 +477,9 @@ async def get_one_item(self, collection_id: str, item_id: str) -> Dict:
                 id=mk_item_id(item_id, collection_id),
             )
         except exceptions.NotFoundError:
-            raise NotFoundError(f"Item {item_id} does not exist in Collection {collection_id}")
+            raise NotFoundError(
+                f"Item {item_id} does not exist in Collection {collection_id}"
+            )
         return item["_source"]
 
     @staticmethod
@@ -499,10 +509,16 @@ def apply_datetime_filter(search: Search, datetime_search):
             Search: The filtered search object.
         """
         if "eq" in datetime_search:
-            search = search.filter("term", **{"properties__datetime": datetime_search["eq"]})
+            search = search.filter(
+                "term", **{"properties__datetime": datetime_search["eq"]}
+            )
         else:
-            search = search.filter("range", properties__datetime={"lte": datetime_search["lte"]})
-            search = search.filter("range", properties__datetime={"gte": datetime_search["gte"]})
+            search = search.filter(
+                "range", properties__datetime={"lte": datetime_search["lte"]}
+            )
+            search = search.filter(
+                "range", properties__datetime={"gte": datetime_search["gte"]}
+            )
         return search
 
     @staticmethod
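apply_datetime_filter turns a parsed datetime search into either an exact term filter or an lte/gte range pair on properties.datetime. The sketch below copies that logic into a standalone script and prints the query bodies it produces; it only needs elasticsearch_dsl, no running cluster:

    # Sketch: inspecting the query bodies produced by the two branches above.
    from elasticsearch_dsl import Search


    def apply_datetime_filter(search: Search, datetime_search: dict) -> Search:
        if "eq" in datetime_search:
            search = search.filter(
                "term", **{"properties__datetime": datetime_search["eq"]}
            )
        else:
            search = search.filter(
                "range", properties__datetime={"lte": datetime_search["lte"]}
            )
            search = search.filter(
                "range", properties__datetime={"gte": datetime_search["gte"]}
            )
        return search


    exact = apply_datetime_filter(Search(), {"eq": "2020-02-12T12:30:22Z"})
    window = apply_datetime_filter(
        Search(), {"gte": "2020-02-01T00:00:00Z", "lte": "2020-02-29T23:59:59Z"}
    )
    print(exact.to_dict())   # single term filter on properties.datetime
    print(window.to_dict())  # two range filters combined under bool.filter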
@@ -596,7 +612,9 @@ def apply_free_text_filter(search: Search, free_text_queries: Optional[List[str]
         """Database logic to perform query for search endpoint."""
         if free_text_queries is not None:
             free_text_query_string = '" OR properties.\\*:"'.join(free_text_queries)
-            search = search.query("query_string", query=f'properties.\\*:"{free_text_query_string}"')
+            search = search.query(
+                "query_string", query=f'properties.\\*:"{free_text_query_string}"'
+            )
 
         return search
 
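The free-text branch ORs one properties.\* wildcard clause per term into a single query_string query. A short sketch of the string it assembles (pure Python, example terms are made up):

    # Sketch: how the query_string above is built from a list of terms.
    free_text_queries = ["sentinel", "cloud"]
    free_text_query_string = '" OR properties.\\*:"'.join(free_text_queries)
    query = f'properties.\\*:"{free_text_query_string}"'
    print(query)  # properties.\*:"sentinel" OR properties.\*:"cloud"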
@@ -709,7 +727,11 @@ async def execute_search(
         if hits and (sort_array := hits[limit - 1].get("sort")):
             next_token = urlsafe_b64encode(json.dumps(sort_array).encode()).decode()
 
-        matched = es_response["hits"]["total"]["value"] if es_response["hits"]["total"]["relation"] == "eq" else None
+        matched = (
+            es_response["hits"]["total"]["value"]
+            if es_response["hits"]["total"]["relation"] == "eq"
+            else None
+        )
         if count_task.done():
             try:
                 matched = count_task.result().get("count")
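The next_token built above is keyset pagination: the sort values of the last hit, JSON-encoded and then base64url-encoded, so a follow-up request can hand them back as search_after. A round-trip sketch with made-up sort values:

    # Sketch: encoding and decoding the pagination token shown above.
    import json
    from base64 import urlsafe_b64decode, urlsafe_b64encode

    sort_array = [1708300800000, "demo-item"]  # placeholder sort values of the last hit
    next_token = urlsafe_b64encode(json.dumps(sort_array).encode()).decode()
    print(next_token)

    search_after = json.loads(urlsafe_b64decode(next_token))
    assert search_after == sort_array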
@@ -789,7 +811,9 @@ async def check_collection_exists(self, collection_id: str):
         if not await self.client.exists(index=COLLECTIONS_INDEX, id=collection_id):
             raise NotFoundError(f"Collection {collection_id} does not exist")
 
-    async def prep_create_item(self, item: Item, base_url: str, exist_ok: bool = False) -> Item:
+    async def prep_create_item(
+        self, item: Item, base_url: str, exist_ok: bool = False
+    ) -> Item:
         """
         Preps an item for insertion into the database.
 
@@ -811,11 +835,15 @@ async def prep_create_item(self, item: Item, base_url: str, exist_ok: bool = Fal
             index=index_alias_by_collection_id(item["collection"]),
             id=mk_item_id(item["id"], item["collection"]),
         ):
-            raise ConflictError(f"Item {item['id']} in collection {item['collection']} already exists")
+            raise ConflictError(
+                f"Item {item['id']} in collection {item['collection']} already exists"
+            )
 
         return self.item_serializer.stac_to_db(item, base_url)
 
-    def sync_prep_create_item(self, item: Item, base_url: str, exist_ok: bool = False) -> Item:
+    def sync_prep_create_item(
+        self, item: Item, base_url: str, exist_ok: bool = False
+    ) -> Item:
         """
         Prepare an item for insertion into the database.
 
@@ -844,7 +872,9 @@ def sync_prep_create_item(self, item: Item, base_url: str, exist_ok: bool = Fals
             index=index_alias_by_collection_id(collection_id),
             id=mk_item_id(item_id, collection_id),
         ):
-            raise ConflictError(f"Item {item_id} in collection {collection_id} already exists")
+            raise ConflictError(
+                f"Item {item_id} in collection {collection_id} already exists"
+            )
 
         return self.item_serializer.stac_to_db(item, base_url)
 
@@ -872,7 +902,9 @@ async def create_item(self, item: Item, refresh: bool = False):
         )
 
         if (meta := es_resp.get("meta")) and meta.get("status") == 409:
-            raise ConflictError(f"Item {item_id} in collection {collection_id} already exists")
+            raise ConflictError(
+                f"Item {item_id} in collection {collection_id} already exists"
+            )
 
     async def merge_patch_item(
         self,
@@ -929,7 +961,10 @@ async def json_patch_item(
         script_operations = []
 
         for operation in operations:
-            if operation.path in ["collection", "id"] and operation.op in ["add", "replace"]:
+            if operation.path in ["collection", "id"] and operation.op in [
+                "add",
+                "replace",
+            ]:
 
                 if operation.path == "collection" and collection_id != operation.value:
                     await self.check_collection_exists(collection_id=operation.value)
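The branch above singles out JSON Patch (RFC 6902) add/replace operations whose path is collection or id, since changing either affects which index and document id the item lives under; the remaining operations feed the script_operations list declared above. A sketch of that split using a stand-in operation model (PatchOperation is a hypothetical dataclass, not the project's real request type):

    # Sketch: separating collection/id changes from in-place patch operations.
    from dataclasses import dataclass
    from typing import Any


    @dataclass
    class PatchOperation:
        op: str
        path: str
        value: Any = None


    operations = [
        PatchOperation(op="replace", path="collection", value="demo-collection"),
        PatchOperation(op="replace", path="properties/eo:cloud_cover", value=12.5),
        PatchOperation(op="remove", path="properties/gsd"),
    ]

    for operation in operations:
        if operation.path in ["collection", "id"] and operation.op in ["add", "replace"]:
            print("changes the document's collection or id:", operation)
        else:
            print("left for the scripted update:", operation)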
@@ -992,7 +1027,9 @@ async def json_patch_item(
 
         return item
 
-    async def delete_item(self, item_id: str, collection_id: str, refresh: bool = False):
+    async def delete_item(
+        self, item_id: str, collection_id: str, refresh: bool = False
+    ):
         """Delete a single item from the database.
 
         Args:
@@ -1010,7 +1047,9 @@ async def delete_item(self, item_id: str, collection_id: str, refresh: bool = Fa
                 refresh=refresh,
             )
         except exceptions.NotFoundError:
-            raise NotFoundError(f"Item {item_id} in collection {collection_id} not found")
+            raise NotFoundError(
+                f"Item {item_id} in collection {collection_id} not found"
+            )
 
     async def create_collection(self, collection: Collection, refresh: bool = False):
         """Create a single collection in the database.
@@ -1057,13 +1096,17 @@ async def find_collection(self, collection_id: str) -> Collection:
             collection as a `Collection` object. If the collection is not found, a `NotFoundError` is raised.
         """
         try:
-            collection = await self.client.get(index=COLLECTIONS_INDEX, id=collection_id)
+            collection = await self.client.get(
+                index=COLLECTIONS_INDEX, id=collection_id
+            )
         except exceptions.NotFoundError:
             raise NotFoundError(f"Collection {collection_id} not found")
 
         return collection["_source"]
 
-    async def update_collection(self, collection_id: str, collection: Collection, refresh: bool = False):
+    async def update_collection(
+        self, collection_id: str, collection: Collection, refresh: bool = False
+    ):
         """Update a collection from the database.
 
         Args:
@@ -1208,10 +1251,14 @@ async def delete_collection(self, collection_id: str, refresh: bool = False):
             function also calls `delete_item_index` to delete the index for the items in the collection.
         """
         await self.find_collection(collection_id=collection_id)
-        await self.client.delete(index=COLLECTIONS_INDEX, id=collection_id, refresh=refresh)
+        await self.client.delete(
+            index=COLLECTIONS_INDEX, id=collection_id, refresh=refresh
+        )
         await delete_item_index(collection_id)
 
-    async def bulk_async(self, collection_id: str, processed_items: List[Item], refresh: bool = False) -> None:
+    async def bulk_async(
+        self, collection_id: str, processed_items: List[Item], refresh: bool = False
+    ) -> None:
         """Perform a bulk insert of items into the database asynchronously.
 
         Args:
@@ -1233,7 +1280,9 @@ async def bulk_async(self, collection_id: str, processed_items: List[Item], refr
             raise_on_error=False,
         )
 
-    def bulk_sync(self, collection_id: str, processed_items: List[Item], refresh: bool = False) -> None:
+    def bulk_sync(
+        self, collection_id: str, processed_items: List[Item], refresh: bool = False
+    ) -> None:
         """Perform a bulk insert of items into the database synchronously.
 
         Args:
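Both bulk methods hand pre-serialized items to the Elasticsearch bulk helpers with raise_on_error=False, so a single failed document does not abort the batch. A minimal sketch of the async variant using elasticsearch.helpers.async_bulk; the host, index, and documents are placeholders rather than values from this module:

    # Sketch: an async bulk insert in the spirit of bulk_async above.
    import asyncio

    from elasticsearch import AsyncElasticsearch
    from elasticsearch.helpers import async_bulk


    async def main() -> None:
        client = AsyncElasticsearch(hosts=["http://localhost:9200"])  # placeholder host
        actions = (
            {
                "_index": "items_demo-collection",
                "_id": f"item-{i}",
                "_source": {"id": f"item-{i}", "collection": "demo-collection"},
            }
            for i in range(3)
        )
        success, errors = await async_bulk(
            client, actions, refresh=True, raise_on_error=False
        )
        print(success, errors)  # count of successful actions, list of failed ones
        await client.close()


    asyncio.run(main())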