 from stac_fastapi.core.base_settings import ApiBaseSettings
 from stac_fastapi.core.datetime_utils import format_datetime_range
 from stac_fastapi.core.models.links import PagingLinks
+from stac_fastapi.core.redis_utils import close_redis, connect_redis
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
 from stac_fastapi.core.utilities import filter_fields
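
For reference, `stac_fastapi.core.redis_utils` is not part of this diff. A minimal sketch of what `connect_redis` and `close_redis` might look like, assuming redis-py's asyncio client and a `REDIS_URL` environment variable (the module name comes from the import above, but everything in the sketch is an assumption, not the actual implementation):

```python
# Hypothetical sketch of stac_fastapi/core/redis_utils.py -- the real module is
# not shown in this diff, so names, env vars, and behavior here are assumptions.
import os
from typing import Optional

from redis import asyncio as aioredis

_redis_client: Optional[aioredis.Redis] = None


async def connect_redis() -> Optional[aioredis.Redis]:
    """Return a shared async Redis client, creating it on first use."""
    global _redis_client
    if _redis_client is None:
        url = os.getenv("REDIS_URL", "redis://localhost:6379/0")
        # decode_responses=True makes get() return str, so cached URLs can be
        # compared directly against current_url in the handlers below.
        _redis_client = aioredis.from_url(url, decode_responses=True)
    return _redis_client


async def close_redis() -> None:
    """Close the shared client so connections are not leaked between requests."""
    global _redis_client
    if _redis_client is not None:
        await _redis_client.aclose()
        _redis_client = None
```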
@@ -237,6 +238,18 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
         base_url = str(request.base_url)
         limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
         token = request.query_params.get("token")
+        current_url = str(request.url)
+        redis = None
+        try:
+            redis = await connect_redis()
+
+            if redis:
+                current_key = "current:collections"
+                await redis.setex(current_key, 600, current_url)
+
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
 
         collections, next_token = await self.database.get_all_collections(
             token=token, limit=limit, request=request
@@ -252,10 +265,29 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
             },
         ]
 
-        if next_token:
+        if token and redis:
+            prev_key = "prev:collections"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url:
+                if previous_url != current_url:
+                    links.append(
+                        {
+                            "rel": "previous",
+                            "type": MimeTypes.json,
+                            "href": previous_url,
+                        }
+                    )
+
+        if next_token and redis:
+            prev_key = "prev:collections"
+            await redis.setex(prev_key, 600, current_url)
+
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
 
+        if redis:
+            await close_redis()
         return stac_types.Collections(collections=collections, links=links)
 
     async def get_collection(
@@ -310,20 +342,23 @@ async def item_collection(
310342 """
311343 request : Request = kwargs ["request" ]
312344 token = request .query_params .get ("token" )
313- if not hasattr (self , '_prev_links' ):
314- self ._prev_links = {}
315-
316- session_id = request .cookies .get ('stac_session' , 'default_session' )
317- current_self_link = str (request .url )
318-
319- if session_id not in self ._prev_links :
320- self ._prev_links [session_id ] = []
321-
322- history = self ._prev_links [session_id ]
323- if not history or current_self_link != history [- 1 ]:
324- history .append (current_self_link )
325345 base_url = str (request .base_url )
326346
347+ current_url = str (request .url )
348+
349+ try :
350+ redis = await connect_redis ()
351+ except Exception as e :
352+ logger .error (f"Redis connection error: { e } " )
353+ redis = None
354+
355+ if redis :
356+ try :
357+ current_key = f"current:{ collection_id } "
358+ await redis .setex (current_key , 600 , current_url )
359+ except Exception as e :
360+ logger .error (f"Redis error: { e } " )
361+
327362 collection = await self .get_collection (
328363 collection_id = collection_id , request = request
329364 )
@@ -374,23 +409,37 @@ async def item_collection(
374409 "href" : urljoin (str (request .base_url ), f"collections/{ collection_id } " ),
375410 },
376411 {
377- "rel" : "parent" ,
412+ "rel" : "parent" ,
378413 "type" : "application/json" ,
379414 "href" : urljoin (str (request .base_url ), f"collections/{ collection_id } " ),
380- }
415+ },
381416 ]
382417
383418 paging_links = await PagingLinks (request = request , next = next_token ).get_links ()
384- history = self ._prev_links .get (session_id , [])
385- if len (history ) > 1 :
386- previous_self_link = history [- 2 ]
387- paging_links .append ({
388- "rel" : "previous" ,
389- "type" : "application/json" ,
390- "href" : previous_self_link ,
391- })
419+
420+ if token and redis :
421+ prev_key = f"prev:{ collection_id } "
422+ previous_url = await redis .get (prev_key )
423+
424+ if previous_url :
425+ # prevent looped navigation
426+ if previous_url != current_url :
427+ paging_links .append (
428+ {
429+ "rel" : "previous" ,
430+ "type" : "application/json" ,
431+ "href" : previous_url ,
432+ }
433+ )
434+ if redis and next_token :
435+ prev_key = f"prev:{ collection_id } "
436+ await redis .setex (prev_key , 600 , current_url )
437+
392438 links = collection_links + paging_links
393439
440+ if redis :
441+ await close_redis ()
442+
394443 return stac_types .ItemCollection (
395444 type = "FeatureCollection" ,
396445 features = items ,
@@ -529,6 +578,12 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        current_url = str(request.url)
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
 
         search = self.database.make_search()
 
@@ -628,6 +683,26 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()
 
+        if search_request.token and redis:
+            prev_key = "prev:search_result"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url and previous_url != current_url:
+                links.append(
+                    {
+                        "rel": "previous",
+                        "type": "application/json",
+                        "href": previous_url,
+                    }
+                )
+
+        if redis and next_token:
+            prev_key = "prev:search_result"
+            await redis.setex(prev_key, 600, current_url)
+
+        if redis:
+            await close_redis()
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
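
Taken together, the handlers above follow one previous-link pattern: when a page has a `next_token`, cache the current request URL in Redis under a fixed key with a 600-second TTL; when a request arrives with a paging `token`, read that key back and, if it differs from the current URL, expose it as a `rel="previous"` link. A standalone sketch of that pattern, assuming a local Redis and redis-py's asyncio client (the `build_links` helper, URLs, and tokens are illustrative only, not part of stac-fastapi):

```python
# Standalone sketch of the Redis-backed previous-link pattern used in this diff.
import asyncio
from typing import List, Optional

from redis import asyncio as aioredis

PREV_TTL = 600  # seconds, matching the setex(..., 600, ...) calls in the diff


async def build_links(
    redis: aioredis.Redis,
    key: str,
    current_url: str,
    token: Optional[str],
    next_token: Optional[str],
) -> List[dict]:
    """Build next/previous paging links, using Redis as the back-reference store."""
    links: List[dict] = []

    # The request came in via a paging token: look up the URL cached by the
    # previous request and expose it as a "previous" link (skipping self-loops).
    if token:
        previous_url = await redis.get(key)
        if previous_url and previous_url != current_url:
            links.append({"rel": "previous", "type": "application/json", "href": previous_url})

    # Another page follows: remember the current URL so the next request can
    # link back to this page.
    if next_token:
        await redis.setex(key, PREV_TTL, current_url)
        links.append({"rel": "next", "type": "application/json", "href": f"{current_url}?token={next_token}"})

    return links


async def main() -> None:
    redis = aioredis.from_url("redis://localhost:6379/0", decode_responses=True)
    page1 = await build_links(redis, "prev:collections", "http://localhost/collections", None, "tok2")
    page2 = await build_links(redis, "prev:collections", "http://localhost/collections?token=tok2", "tok2", None)
    print(page1)  # only a "next" link
    print(page2)  # a "previous" link pointing back at page 1
    await redis.aclose()


asyncio.run(main())
```

Because the keys (`prev:collections`, `prev:{collection_id}`, `prev:search_result`) are not scoped per client, concurrent users share the same back-reference; that is a property of the diff as written, not of the sketch.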