 from stac_fastapi.types.extension import ApiExtension
 from stac_fastapi.types.requests import get_base_url
 from stac_fastapi.types.search import BaseSearchPostRequest
+from stac_fastapi.core.redis_utils import connect_redis, close_redis
 
 logger = logging.getLogger(__name__)
 
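The hunks below rely on connect_redis() and close_redis() from stac_fastapi.core.redis_utils, a module that is not shown in this diff. A minimal sketch of what those helpers might look like, assuming redis-py's asyncio client and a REDIS_URL environment variable (both assumptions, not taken from this change):

# Hypothetical sketch of stac_fastapi/core/redis_utils.py -- not part of this diff.
# Assumes redis-py's asyncio API and a REDIS_URL environment variable.
import logging
import os
from typing import Optional

from redis import asyncio as aioredis

logger = logging.getLogger(__name__)

_redis: Optional[aioredis.Redis] = None


async def connect_redis() -> Optional[aioredis.Redis]:
    """Return a shared async Redis client, or None if the server is unreachable."""
    global _redis
    if _redis is None:
        url = os.getenv("REDIS_URL", "redis://localhost:6379/0")
        client = aioredis.from_url(url, decode_responses=True)
        try:
            await client.ping()
        except Exception:
            logger.warning("Redis unavailable at %s", url)
            return None
        _redis = client
    return _redis


async def close_redis() -> None:
    """Close the shared client, if one was created."""
    global _redis
    if _redis is not None:
        await _redis.close()
        _redis = None

decode_responses=True matters for the handlers below: redis.get() otherwise returns bytes, while the stored value is compared against current_url as a string.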
@@ -237,6 +238,18 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
         base_url = str(request.base_url)
         limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
         token = request.query_params.get("token")
+        current_url = str(request.url)
+        redis = None
+        try:
+            redis = await connect_redis()
+
+            if redis:
+                current_key = "current:collections"
+                await redis.setex(current_key, 600, current_url)
+
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
 
         collections, next_token = await self.database.get_all_collections(
             token=token, limit=limit, request=request
@@ -252,10 +265,27 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
             },
         ]
 
-        if next_token:
+        if token and redis:
+            prev_key = "prev:collections"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url:
+                if previous_url != current_url:
+                    links.append({
+                        "rel": "previous",
+                        "type": MimeTypes.json,
+                        "href": previous_url,
+                    })
+
+        if next_token and redis:
+            prev_key = "prev:collections"
+            await redis.setex(prev_key, 600, current_url)
+
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
 
+        if redis:
+            await redis.close()
         return stac_types.Collections(collections=collections, links=links)
 
     async def get_collection(
@@ -310,20 +340,23 @@ async def item_collection(
310340 """
311341 request : Request = kwargs ["request" ]
312342 token = request .query_params .get ("token" )
313- if not hasattr (self , '_prev_links' ):
314- self ._prev_links = {}
315-
316- session_id = request .cookies .get ('stac_session' , 'default_session' )
317- current_self_link = str (request .url )
318-
319- if session_id not in self ._prev_links :
320- self ._prev_links [session_id ] = []
321-
322- history = self ._prev_links [session_id ]
323- if not history or current_self_link != history [- 1 ]:
324- history .append (current_self_link )
325343 base_url = str (request .base_url )
326344
345+ current_url = str (request .url )
346+
347+ try :
348+ redis = await connect_redis ()
349+ except Exception as e :
350+ logger .error (f"Redis connection error: { e } " )
351+ redis = None
352+
353+ if redis :
354+ try :
355+ current_key = f"current:{ collection_id } "
356+ await redis .setex (current_key , 600 , current_url )
357+ except Exception as e :
358+ logger .error (f"Redis error: { e } " )
359+
327360 collection = await self .get_collection (
328361 collection_id = collection_id , request = request
329362 )
@@ -381,16 +414,28 @@ async def item_collection(
         ]
 
         paging_links = await PagingLinks(request=request, next=next_token).get_links()
-        history = self._prev_links.get(session_id, [])
-        if len(history) > 1:
-            previous_self_link = history[-2]
-            paging_links.append({
-                "rel": "previous",
-                "type": "application/json",
-                "href": previous_self_link,
-            })
+
+        if token and redis:
+            prev_key = f"prev:{collection_id}"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url:
+                # prevent looped navigation
+                if previous_url != current_url:
+                    paging_links.append({
+                        "rel": "previous",
+                        "type": "application/json",
+                        "href": previous_url,
+                    })
+        if redis and next_token:
+            prev_key = f"prev:{collection_id}"
+            await redis.setex(prev_key, 600, current_url)
+
         links = collection_links + paging_links
 
+        if redis:
+            await redis.close()
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
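The same store-and-read pattern appears in all_collections, item_collection, and post_search with only the key name changing: when a next token is issued, the current page URL is written under a "prev:" key with a 600-second TTL; the following paged request reads that key back and emits a rel="previous" link. A hedged sketch of how the repeated block could be factored into one shared helper (hypothetical names, not part of this diff):

# Hypothetical helper, not part of this diff. Assumes the same connect_redis()
# client with decode_responses=True, so get() returns str.
from typing import List, Optional


async def previous_link(
    redis, key: str, current_url: str, token: Optional[str], next_token: Optional[str]
) -> List[dict]:
    """Return a rel="previous" link for paged requests and remember the current page."""
    links: List[dict] = []
    if redis is None:
        return links
    if token:  # not the first page, so a previous page may have been recorded
        previous_url = await redis.get(f"prev:{key}")
        if previous_url and previous_url != current_url:
            links.append({"rel": "previous", "type": "application/json", "href": previous_url})
    if next_token:  # this page becomes the "previous" page of the next request
        await redis.setex(f"prev:{key}", 600, current_url)
    return links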
@@ -529,6 +574,12 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        current_url = str(request.url)
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
 
         search = self.database.make_search()
 
@@ -628,6 +679,24 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()
 
+        if search_request.token and redis:
+            prev_key = f"prev:search_result"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url and previous_url != current_url:
+                links.append({
+                    "rel": "previous",
+                    "type": "application/json",
+                    "href": previous_url,
+                })
+
+        if redis and next_token:
+            prev_key = f"prev:search_result"
+            await redis.setex(prev_key, 600, current_url)
+
+        if redis:
+            await redis.close()
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
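Taken together, the three handlers add a Redis-backed rel="previous" link to paged responses. A rough client-side sketch of the expected behavior, assuming a server with this change running at http://localhost:8080 (hypothetical URL) and the httpx client:

# Minimal sketch of exercising the new "previous" links, assuming a STAC API
# with this change deployed at http://localhost:8080 (hypothetical URL).
import httpx


def walk_collections(base: str = "http://localhost:8080") -> None:
    # First page: no token in the request, so no "previous" link is expected.
    page1 = httpx.get(f"{base}/collections", params={"limit": 2}).json()
    next_href = next((l["href"] for l in page1["links"] if l["rel"] == "next"), None)
    if next_href is None:
        return

    # Second page: requested with a token, so a "previous" link pointing back
    # at the first page's URL should now be present (read from Redis).
    page2 = httpx.get(next_href).json()
    prev_href = next((l["href"] for l in page2["links"] if l["rel"] == "previous"), None)
    print("previous ->", prev_href)


if __name__ == "__main__":
    walk_collections()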