from app.services.admin import AdminEventsService

# Router for admin-only event endpoints; every route requires an admin user
# and resolves dependencies through Dishka's route class.
router = APIRouter(
    prefix="/admin/events",
    tags=["admin-events"],
    route_class=DishkaRoute,
    dependencies=[Depends(admin_user)],
)
4138
4239
4340@router .post ("/browse" )
44- async def browse_events (
45- request : EventBrowseRequest ,
46- service : FromDishka [AdminEventsService ]
47- ) -> EventBrowseResponse :
41+ async def browse_events (request : EventBrowseRequest , service : FromDishka [AdminEventsService ]) -> EventBrowseResponse :
4842 try :
4943 event_filter = EventFilterMapper .from_admin_pydantic (request .filters )
5044
@@ -53,15 +47,15 @@ async def browse_events(
5347 skip = request .skip ,
5448 limit = request .limit ,
5549 sort_by = request .sort_by ,
56- sort_order = request .sort_order
50+ sort_order = request .sort_order ,
5751 )
5852
5953 event_mapper = EventMapper ()
6054 return EventBrowseResponse (
6155 events = [jsonable_encoder (event_mapper .to_dict (event )) for event in result .events ],
6256 total = result .total ,
6357 skip = result .skip ,
64- limit = result .limit
58+ limit = result .limit ,
6559 )
6660
6761 except Exception as e :
@@ -70,8 +64,8 @@ async def browse_events(
7064
7165@router .get ("/stats" )
7266async def get_event_stats (
73- service : FromDishka [AdminEventsService ],
74- hours : int = Query (default = 24 , le = 168 ),
67+ service : FromDishka [AdminEventsService ],
68+ hours : int = Query (default = 24 , le = 168 ),
7569) -> EventStatsResponse :
7670 try :
7771 stats = await service .get_event_stats (hours = hours )
@@ -82,11 +76,71 @@ async def get_event_stats(
8276 raise HTTPException (status_code = 500 , detail = str (e ))
8377
8478
@router.get("/export/csv")
async def export_events_csv(
    service: FromDishka[AdminEventsService],
    event_types: list[EventType] | None = Query(None, description="Event types (repeat param for multiple)"),
    start_time: datetime | None = Query(None, description="Start time"),
    end_time: datetime | None = Query(None, description="End time"),
    limit: int = Query(default=10000, le=50000),
) -> StreamingResponse:
    """Export events as a CSV attachment, filtered by event type and time range.

    The service produces the full CSV content in memory; it is wrapped in a
    single-item iterator for StreamingResponse with a Content-Disposition
    header carrying the generated file name.
    """
    try:
        export_filter = EventFilterMapper.from_admin_pydantic(
            AdminEventFilter(
                event_types=event_types,
                start_time=start_time,
                end_time=end_time,
            )
        )
        result = await service.export_events_csv_content(filter=export_filter, limit=limit)
        return StreamingResponse(
            iter([result.content]),
            media_type=result.media_type,
            headers={"Content-Disposition": f"attachment; filename={result.file_name}"},
        )

    except Exception as e:
        # NOTE(review): broad catch exposes str(e) to the client — consider a
        # generic message. Chain the cause so logs keep the original traceback.
        raise HTTPException(status_code=500, detail=str(e)) from e
104+
105+
@router.get("/export/json")
async def export_events_json(
    service: FromDishka[AdminEventsService],
    event_types: list[EventType] | None = Query(None, description="Event types (repeat param for multiple)"),
    aggregate_id: str | None = Query(None, description="Aggregate ID filter"),
    correlation_id: str | None = Query(None, description="Correlation ID filter"),
    user_id: str | None = Query(None, description="User ID filter"),
    service_name: str | None = Query(None, description="Service name filter"),
    start_time: datetime | None = Query(None, description="Start time"),
    end_time: datetime | None = Query(None, description="End time"),
    limit: int = Query(default=10000, le=50000),
) -> StreamingResponse:
    """Export events as JSON with comprehensive filtering.

    All filter parameters are optional; they are folded into an
    AdminEventFilter and mapped to the service-layer filter. The JSON payload
    is returned as a file attachment via StreamingResponse.
    """
    try:
        export_filter = EventFilterMapper.from_admin_pydantic(
            AdminEventFilter(
                event_types=event_types,
                aggregate_id=aggregate_id,
                correlation_id=correlation_id,
                user_id=user_id,
                service_name=service_name,
                start_time=start_time,
                end_time=end_time,
            )
        )
        result = await service.export_events_json_content(filter=export_filter, limit=limit)
        return StreamingResponse(
            iter([result.content]),
            media_type=result.media_type,
            headers={"Content-Disposition": f"attachment; filename={result.file_name}"},
        )

    except Exception as e:
        # Chain the cause so server logs retain the original traceback.
        raise HTTPException(status_code=500, detail=str(e)) from e
140+
141+
85142@router .get ("/{event_id}" )
86- async def get_event_detail (
87- event_id : str ,
88- service : FromDishka [AdminEventsService ]
89- ) -> EventDetailResponse :
143+ async def get_event_detail (event_id : str , service : FromDishka [AdminEventsService ]) -> EventDetailResponse :
90144 try :
91145 result = await service .get_event_detail (event_id )
92146
@@ -98,7 +152,7 @@ async def get_event_detail(
98152 return EventDetailResponse (
99153 event = serialized_result ["event" ],
100154 related_events = serialized_result ["related_events" ],
101- timeline = serialized_result ["timeline" ]
155+ timeline = serialized_result ["timeline" ],
102156 )
103157
104158 except HTTPException :
@@ -109,9 +163,7 @@ async def get_event_detail(
109163
110164@router .post ("/replay" )
111165async def replay_events (
112- request : EventReplayRequest ,
113- background_tasks : BackgroundTasks ,
114- service : FromDishka [AdminEventsService ]
166+ request : EventReplayRequest , background_tasks : BackgroundTasks , service : FromDishka [AdminEventsService ]
115167) -> EventReplayResponse :
116168 try :
117169 replay_correlation_id = f"replay_{ CorrelationContext .get_correlation_id ()} "
@@ -150,10 +202,7 @@ async def replay_events(
150202
151203
152204@router .get ("/replay/{session_id}/status" )
153- async def get_replay_status (
154- session_id : str ,
155- service : FromDishka [AdminEventsService ]
156- ) -> EventReplayStatusResponse :
205+ async def get_replay_status (session_id : str , service : FromDishka [AdminEventsService ]) -> EventReplayStatusResponse :
157206 try :
158207 status = await service .get_replay_status (session_id )
159208
@@ -171,84 +220,16 @@ async def get_replay_status(
171220
@router.delete("/{event_id}")
async def delete_event(
    event_id: str, admin: Annotated[UserResponse, Depends(admin_user)], service: FromDishka[AdminEventsService]
) -> EventDeleteResponse:
    """Delete an event via the service, recording which admin deleted it.

    The service archives the event as part of deletion; a falsy result is
    surfaced as a 500 rather than silently succeeding.
    """
    try:
        deleted = await service.delete_event(event_id=event_id, deleted_by=admin.email)
        if not deleted:
            raise HTTPException(status_code=500, detail="Failed to delete event")

        return EventDeleteResponse(message="Event deleted and archived", event_id=event_id)

    except HTTPException:
        # Re-raise our own HTTP errors untouched so the status code survives.
        raise
    except Exception as e:
        # Chain the cause so server logs retain the original traceback.
        raise HTTPException(status_code=500, detail=str(e)) from e
192-
193-
194- @router .get ("/export/csv" )
195- async def export_events_csv (
196- service : FromDishka [AdminEventsService ],
197- event_types : list [EventType ] | None = Query (None , description = "Event types (repeat param for multiple)" ),
198- start_time : datetime | None = Query (None , description = "Start time" ),
199- end_time : datetime | None = Query (None , description = "End time" ),
200- limit : int = Query (default = 10000 , le = 50000 ),
201- ) -> StreamingResponse :
202- try :
203- export_filter = EventFilterMapper .from_admin_pydantic (
204- AdminEventFilter (
205- event_types = event_types ,
206- start_time = start_time ,
207- end_time = end_time ,
208- )
209- )
210- result = await service .export_events_csv_content (filter = export_filter , limit = limit )
211- return StreamingResponse (
212- iter ([result .content ]),
213- media_type = result .media_type ,
214- headers = {"Content-Disposition" : f"attachment; filename={ result .filename } " },
215- )
216-
217- except Exception as e :
218- raise HTTPException (status_code = 500 , detail = str (e ))
219-
220-
221- @router .get ("/export/json" )
222- async def export_events_json (
223- service : FromDishka [AdminEventsService ],
224- event_types : list [EventType ] | None = Query (None , description = "Event types (repeat param for multiple)" ),
225- aggregate_id : str | None = Query (None , description = "Aggregate ID filter" ),
226- correlation_id : str | None = Query (None , description = "Correlation ID filter" ),
227- user_id : str | None = Query (None , description = "User ID filter" ),
228- service_name : str | None = Query (None , description = "Service name filter" ),
229- start_time : datetime | None = Query (None , description = "Start time" ),
230- end_time : datetime | None = Query (None , description = "End time" ),
231- limit : int = Query (default = 10000 , le = 50000 ),
232- ) -> StreamingResponse :
233- """Export events as JSON with comprehensive filtering."""
234- try :
235- export_filter = EventFilterMapper .from_admin_pydantic (
236- AdminEventFilter (
237- event_types = event_types ,
238- aggregate_id = aggregate_id ,
239- correlation_id = correlation_id ,
240- user_id = user_id ,
241- service_name = service_name ,
242- start_time = start_time ,
243- end_time = end_time ,
244- )
245- )
246- result = await service .export_events_json_content (filter = export_filter , limit = limit )
247- return StreamingResponse (
248- iter ([result .content ]),
249- media_type = result .media_type ,
250- headers = {"Content-Disposition" : f"attachment; filename={ result .filename } " },
251- )
252-
253- except Exception as e :
254- raise HTTPException (status_code = 500 , detail = str (e ))
0 commit comments