@@ -59,14 +59,14 @@ def __init__(self, repository: AdminEventsRepository, replay_service: ReplayService
     async def browse_events(
         self,
         *,
-        filter: EventFilter,
+        event_filter: EventFilter,
         skip: int,
         limit: int,
         sort_by: str,
         sort_order: int,
     ) -> EventBrowseResult:
         return await self._repo.browse_events(
-            filter=filter, skip=skip, limit=limit, sort_by=sort_by, sort_order=sort_order
+            event_filter=event_filter, skip=skip, limit=limit, sort_by=sort_by, sort_order=sort_order
         )

     async def get_event_detail(self, event_id: str) -> EventDetail | None:
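Because `browse_events` takes keyword-only arguments, this rename is a breaking change for every call site, not just an internal cleanup; it also stops shadowing Python's built-in `filter`. A minimal sketch of an updated caller — the `AdminEventsService` class name and the `EventFilter` field shown are assumptions for illustration, not confirmed by this diff:

# Hypothetical caller; "AdminEventsService" and the EventFilter field are assumed names.
service = AdminEventsService(repository=repo, replay_service=replay)
result = await service.browse_events(
    event_filter=EventFilter(service_name="billing"),  # was: filter=EventFilter(...)
    skip=0,
    limit=50,
    sort_by="timestamp",
    sort_order=-1,  # descending, matching the export default below
)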
@@ -180,12 +180,12 @@ async def get_replay_status(self, session_id: str) -> ReplaySessionStatusDetail
         status = await self._repo.get_replay_status_with_progress(session_id)
         return status

-    async def export_events_csv(self, filter: EventFilter) -> List[EventExportRow]:
-        rows = await self._repo.export_events_csv(filter)
+    async def export_events_csv(self, event_filter: EventFilter) -> List[EventExportRow]:
+        rows = await self._repo.export_events_csv(event_filter)
         return rows

-    async def export_events_csv_content(self, *, filter: EventFilter, limit: int) -> ExportResult:
-        rows = await self._repo.export_events_csv(filter)
+    async def export_events_csv_content(self, *, event_filter: EventFilter, limit: int) -> ExportResult:
+        rows = await self._repo.export_events_csv(event_filter)
         output = StringIO()
         writer = csv.DictWriter(
             output,
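The CSV body here is assembled entirely in memory with the standard library: `csv.DictWriter` writes rows into a `StringIO` buffer, and `output.getvalue()` later yields the full text that gets wrapped in `ExportResult`. A standalone sketch of the same pattern, with illustrative column names rather than the service's actual fieldnames:

import csv
from io import StringIO

output = StringIO()
writer = csv.DictWriter(output, fieldnames=["event_id", "event_type", "timestamp"])
writer.writeheader()
writer.writerow({"event_id": "e-1", "event_type": "user.login", "timestamp": "2024-01-01T00:00:00+00:00"})
csv_text = output.getvalue()  # complete CSV document as a str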
@@ -216,8 +216,10 @@ async def export_events_csv_content(self, *, filter: EventFilter, limit: int) -> ExportResult:
         )
         return ExportResult(file_name=filename, content=output.getvalue(), media_type="text/csv")

-    async def export_events_json_content(self, *, filter: EventFilter, limit: int) -> ExportResult:
-        result = await self._repo.browse_events(filter=filter, skip=0, limit=limit, sort_by="timestamp", sort_order=-1)
+    async def export_events_json_content(self, *, event_filter: EventFilter, limit: int) -> ExportResult:
+        result = await self._repo.browse_events(
+            event_filter=event_filter, skip=0, limit=limit, sort_by="timestamp", sort_order=-1
+        )
         event_mapper = EventMapper()
         events_data: list[dict[str, Any]] = []
         for event in result.events:
@@ -232,13 +234,13 @@ async def export_events_json_content(self, *, filter: EventFilter, limit: int) -> ExportResult:
             "exported_at": datetime.now(timezone.utc).isoformat(),
             "total_events": len(events_data),
             "filters_applied": {
-                "event_types": filter.event_types,
-                "aggregate_id": filter.aggregate_id,
-                "correlation_id": filter.correlation_id,
-                "user_id": filter.user_id,
-                "service_name": filter.service_name,
-                "start_time": filter.start_time.isoformat() if filter.start_time else None,
-                "end_time": filter.end_time.isoformat() if filter.end_time else None,
+                "event_types": event_filter.event_types,
+                "aggregate_id": event_filter.aggregate_id,
+                "correlation_id": event_filter.correlation_id,
+                "user_id": event_filter.user_id,
+                "service_name": event_filter.service_name,
+                "start_time": event_filter.start_time.isoformat() if event_filter.start_time else None,
+                "end_time": event_filter.end_time.isoformat() if event_filter.end_time else None,
             },
             "export_limit": limit,
         },
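The `.isoformat()` guards on `start_time` and `end_time` are what keep this metadata JSON-serializable: `json.dumps` rejects raw `datetime` objects, while `None` passes through as `null`. A quick standalone check of that behavior:

import json
from datetime import datetime, timezone

start = datetime(2024, 1, 1, tzinfo=timezone.utc)
json.dumps({"start_time": start.isoformat() if start else None})
# -> '{"start_time": "2024-01-01T00:00:00+00:00"}'
# json.dumps({"start_time": start}) would raise TypeError: Object of type datetime is not JSON serializable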