async def _write_obj_tx(pipe: Pipeline, key: str, write_obj: dict[str, Any]) -> None:
    """Queue an upsert of a pending-write object at *key* onto a Redis pipeline.

    If the JSON document already exists, only the mutable fields
    (``channel``, ``type``, ``blob``) are overwritten in place; otherwise the
    full object is written at the root path.

    Args:
        pipe: An async redis-py ``Pipeline``. ``exists`` is awaited because it
            runs immediately (WATCH/immediate mode); the ``json().set`` calls
            are NOT awaited — queuing a command on a pipeline is synchronous
            and returns the pipeline itself, and awaiting it would be a bug.
            The caller is responsible for ``await pipe.execute()``.
        key: Redis key of the JSON document to create or update.
        write_obj: Mapping holding at least ``channel``, ``type`` and ``blob``.

    Returns:
        None. Commands are only queued; nothing is executed here.
    """
    exists: int = await pipe.exists(key)
    if exists:
        # Partial update: touch only the fields that can change, leaving any
        # other stored fields (e.g. ids set at creation) intact.
        pipe.json().set(key, "$.channel", write_obj["channel"])
        pipe.json().set(key, "$.type", write_obj["type"])
        pipe.json().set(key, "$.blob", write_obj["blob"])
    else:
        # First write for this key: store the whole object at the root path.
        pipe.json().set(key, "$", write_obj)
9999
100100class AsyncShallowRedisSaver (BaseRedisSaver [AsyncRedis , AsyncSearchIndex ]):
@@ -240,7 +240,7 @@ async def aput(
240240 )
241241
242242 # Add checkpoint data to pipeline
243- await pipeline .json ().set (checkpoint_key , "$" , checkpoint_data )
243+ pipeline .json ().set (checkpoint_key , "$" , checkpoint_data )
244244
245245 # Before storing the new blobs, clean up old ones that won't be needed
246246 # - Get a list of all blob keys for this thread_id and checkpoint_ns
@@ -274,7 +274,7 @@ async def aput(
274274 continue
275275 else :
276276 # This is an old version, delete it
277- await pipeline .delete (blob_key )
277+ pipeline .delete (blob_key )
278278
279279 # Store the new blob values
280280 blobs = self ._dump_blobs (
@@ -287,7 +287,7 @@ async def aput(
287287 if blobs :
288288 # Add all blob data to pipeline
289289 for key , data in blobs :
290- await pipeline .json ().set (key , "$" , data )
290+ pipeline .json ().set (key , "$" , data )
291291
292292 # Execute all operations atomically
293293 await pipeline .execute ()
@@ -571,7 +571,7 @@ async def aput_writes(
571571
572572 # If the write is for a different checkpoint_id, delete it
573573 if key_checkpoint_id != checkpoint_id :
574- await pipeline .delete (write_key )
574+ pipeline .delete (write_key )
575575
576576 # Add new writes to the pipeline
577577 upsert_case = all (w [0 ] in WRITES_IDX_MAP for w in writes )
@@ -589,17 +589,15 @@ async def aput_writes(
589589 exists = await self ._redis .exists (key )
590590 if exists :
591591 # Update existing key
592- await pipeline .json ().set (
593- key , "$.channel" , write_obj ["channel" ]
594- )
595- await pipeline .json ().set (key , "$.type" , write_obj ["type" ])
596- await pipeline .json ().set (key , "$.blob" , write_obj ["blob" ])
592+ pipeline .json ().set (key , "$.channel" , write_obj ["channel" ])
593+ pipeline .json ().set (key , "$.type" , write_obj ["type" ])
594+ pipeline .json ().set (key , "$.blob" , write_obj ["blob" ])
597595 else :
598596 # Create new key
599- await pipeline .json ().set (key , "$" , write_obj )
597+ pipeline .json ().set (key , "$" , write_obj )
600598 else :
601599 # For shallow implementation, always set the full object
602- await pipeline .json ().set (key , "$" , write_obj )
600+ pipeline .json ().set (key , "$" , write_obj )
603601
604602 # Execute all operations atomically
605603 await pipeline .execute ()
0 commit comments