diff --git a/backend/app/api/routes/admin/events.py b/backend/app/api/routes/admin/events.py
index 681c7146..89a802ba 100644
--- a/backend/app/api/routes/admin/events.py
+++ b/backend/app/api/routes/admin/events.py
@@ -12,11 +12,7 @@
 from app.domain.enums.events import EventType
 from app.infrastructure.mappers import (
     AdminReplayApiMapper,
-    EventDetailMapper,
     EventFilterMapper,
-    EventMapper,
-    EventStatisticsMapper,
-    ReplaySessionMapper,
 )
 from app.schemas_pydantic.admin_events import (
     EventBrowseRequest,
@@ -50,9 +46,8 @@ async def browse_events(request: EventBrowseRequest, service: FromDishka[AdminEv
         sort_order=request.sort_order,
     )

-    event_mapper = EventMapper()
     return EventBrowseResponse(
-        events=[jsonable_encoder(event_mapper.to_dict(event)) for event in result.events],
+        events=[jsonable_encoder(event) for event in result.events],
         total=result.total,
         skip=result.skip,
         limit=result.limit,
@@ -69,8 +64,7 @@ async def get_event_stats(
 ) -> EventStatsResponse:
     try:
         stats = await service.get_event_stats(hours=hours)
-        stats_mapper = EventStatisticsMapper()
-        return EventStatsResponse(**stats_mapper.to_dict(stats))
+        return EventStatsResponse.model_validate(stats)
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e))
@@ -147,12 +141,10 @@ async def get_event_detail(event_id: str, service: FromDishka[AdminEventsService
         if not result:
             raise HTTPException(status_code=404, detail="Event not found")

-        detail_mapper = EventDetailMapper()
-        serialized_result = jsonable_encoder(detail_mapper.to_dict(result))
         return EventDetailResponse(
-            event=serialized_result["event"],
-            related_events=serialized_result["related_events"],
-            timeline=serialized_result["timeline"],
+            event=jsonable_encoder(result.event),
+            related_events=[jsonable_encoder(e) for e in result.related_events],
+            timeline=[jsonable_encoder(e) for e in result.timeline],
         )

     except HTTPException:
@@ -209,8 +201,7 @@ async def get_replay_status(session_id: str, service: FromDishka[AdminEventsServ
         if not status:
             raise HTTPException(status_code=404, detail="Replay session not found")

-        replay_mapper = ReplaySessionMapper()
-        return EventReplayStatusResponse(**replay_mapper.status_detail_to_dict(status))
+        return EventReplayStatusResponse.model_validate(status)

     except HTTPException:
         raise
diff --git a/backend/app/api/routes/admin/users.py b/backend/app/api/routes/admin/users.py
index c630a274..94ad2741 100644
--- a/backend/app/api/routes/admin/users.py
+++ b/backend/app/api/routes/admin/users.py
@@ -9,8 +9,12 @@
 from app.domain.enums.user import UserRole
 from app.domain.rate_limit import UserRateLimit
 from app.domain.user import UserUpdate as DomainUserUpdate
-from app.infrastructure.mappers import AdminOverviewApiMapper, UserMapper
-from app.schemas_pydantic.admin_user_overview import AdminUserOverview
+from app.schemas_pydantic.admin_user_overview import (
+    AdminUserOverview,
+    DerivedCounts,
+    RateLimitSummary,
+)
+from app.schemas_pydantic.events import EventResponse, EventStatistics
 from app.schemas_pydantic.user import (
     DeleteUserResponse,
     MessageResponse,
@@ -48,17 +52,20 @@ async def list_users(
         role=role,
     )

-    user_mapper = UserMapper()
     summaries = await rate_limit_service.get_user_rate_limit_summaries([u.user_id for u in result.users])

     user_responses: list[UserResponse] = []
     for user in result.users:
-        user_dict = user_mapper.to_response_dict(user)
+        user_response = UserResponse.model_validate(user)
         summary = summaries.get(user.user_id)
         if summary:
-            user_dict["bypass_rate_limit"] = summary.bypass_rate_limit
-            user_dict["global_multiplier"] = summary.global_multiplier
-            user_dict["has_custom_limits"] = summary.has_custom_limits
-        user_responses.append(UserResponse(**user_dict))
+            user_response = user_response.model_copy(
+                update={
+                    "bypass_rate_limit": summary.bypass_rate_limit,
+                    "global_multiplier": summary.global_multiplier,
+                    "has_custom_limits": summary.has_custom_limits,
+                }
+            )
+        user_responses.append(user_response)

     return UserListResponse(
         users=user_responses,
@@ -80,8 +87,7 @@ async def create_user(
         domain_user = await admin_user_service.create_user(admin_username=admin.username, user_data=user_data)
     except ValueError as ve:
         raise HTTPException(status_code=400, detail=str(ve))
-    user_mapper = UserMapper()
-    return UserResponse(**user_mapper.to_response_dict(domain_user))
+    return UserResponse.model_validate(domain_user)


 @router.get("/{user_id}", response_model=UserResponse)
@@ -94,8 +100,7 @@ async def get_user(
     if not user:
         raise HTTPException(status_code=404, detail="User not found")

-    user_mapper = UserMapper()
-    return UserResponse(**user_mapper.to_response_dict(user))
+    return UserResponse.model_validate(user)


 @router.get("/{user_id}/overview", response_model=AdminUserOverview)
@@ -109,8 +114,13 @@ async def get_user_overview(
         domain = await admin_user_service.get_user_overview(user_id=user_id, hours=24)
     except ValueError:
         raise HTTPException(status_code=404, detail="User not found")
-    mapper = AdminOverviewApiMapper()
-    return mapper.to_response(domain)
+    return AdminUserOverview(
+        user=UserResponse.model_validate(domain.user),
+        stats=EventStatistics.model_validate(domain.stats),
+        derived_counts=DerivedCounts.model_validate(domain.derived_counts),
+        rate_limit_summary=RateLimitSummary.model_validate(domain.rate_limit_summary),
+        recent_events=[EventResponse.model_validate(e).model_dump() for e in domain.recent_events],
+    )


 @router.put("/{user_id}", response_model=UserResponse)
@@ -141,8 +151,7 @@ async def update_user(
     if not updated_user:
         raise HTTPException(status_code=500, detail="Failed to update user")

-    user_mapper = UserMapper()
-    return UserResponse(**user_mapper.to_response_dict(updated_user))
+    return UserResponse.model_validate(updated_user)


 @router.delete("/{user_id}", response_model=DeleteUserResponse)
diff --git a/backend/app/api/routes/dlq.py b/backend/app/api/routes/dlq.py
index 5123b3fd..ffb0b23a 100644
--- a/backend/app/api/routes/dlq.py
+++ b/backend/app/api/routes/dlq.py
@@ -59,27 +59,8 @@ async def get_dlq_messages(
         status=status, topic=topic, event_type=event_type, limit=limit, offset=offset
     )

-    # Convert domain messages to response models
-    messages = [
-        DLQMessageResponse(
-            event_id=msg.event_id or "unknown",
-            event_type=msg.event_type,
-            original_topic=msg.original_topic,
-            error=msg.error,
-            retry_count=msg.retry_count,
-            failed_at=msg.failed_at or datetime(1970, 1, 1, tzinfo=timezone.utc),
-            status=DLQMessageStatus(msg.status),
-            age_seconds=msg.age_seconds,
-            details={
-                "producer_id": msg.producer_id,
-                "dlq_offset": msg.dlq_offset,
-                "dlq_partition": msg.dlq_partition,
-                "last_error": msg.last_error,
-                "next_retry_at": msg.next_retry_at,
-            },
-        )
-        for msg in result.messages
-    ]
+    # Convert domain messages to response models using model_validate
+    messages = [DLQMessageResponse.model_validate(msg) for msg in result.messages]

     return DLQMessagesResponse(messages=messages, total=result.total, offset=result.offset, limit=result.limit)

@@ -163,15 +144,4 @@ async def discard_dlq_message(
 @router.get("/topics", response_model=List[DLQTopicSummaryResponse])
 async def get_dlq_topics(repository: FromDishka[DLQRepository]) -> List[DLQTopicSummaryResponse]:
     topics = await repository.get_topics_summary()
-    return [
-        DLQTopicSummaryResponse(
-            topic=topic.topic,
-            total_messages=topic.total_messages,
-            status_breakdown=topic.status_breakdown,
-            oldest_message=topic.oldest_message,
-            newest_message=topic.newest_message,
-            avg_retry_count=topic.avg_retry_count,
-            max_retry_count=topic.max_retry_count,
-        )
-        for topic in topics
-    ]
+    return [DLQTopicSummaryResponse.model_validate(topic) for topic in topics]
diff --git a/backend/app/api/routes/events.py b/backend/app/api/routes/events.py
index 017625eb..7fd6303d 100644
--- a/backend/app/api/routes/events.py
+++ b/backend/app/api/routes/events.py
@@ -12,8 +12,7 @@
 from app.core.utils import get_client_ip
 from app.domain.enums.common import SortOrder
 from app.domain.events.event_models import EventFilter
-from app.infrastructure.kafka.events.metadata import EventMetadata
-from app.infrastructure.mappers import EventMapper, EventStatisticsMapper
+from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata
 from app.schemas_pydantic.events import (
     DeleteEventResponse,
     EventAggregationRequest,
@@ -39,21 +38,30 @@ async def get_execution_events(
     current_user: Annotated[UserResponse, Depends(current_user)],
     event_service: FromDishka[EventService],
     include_system_events: bool = Query(False, description="Include system-generated events"),
+    limit: int = Query(100, ge=1, le=1000),
+    skip: int = Query(0, ge=0),
 ) -> EventListResponse:
-    mapper = EventMapper()
-    events = await event_service.get_execution_events(
+    result = await event_service.get_execution_events(
         execution_id=execution_id,
         user_id=current_user.user_id,
         user_role=current_user.role,
         include_system_events=include_system_events,
+        limit=limit,
+        skip=skip,
     )

-    if events is None:
+    if result is None:
         raise HTTPException(status_code=403, detail="Access denied")

-    event_responses = [EventResponse(**mapper.to_dict(event)) for event in events]
+    event_responses = [EventResponse.model_validate(event) for event in result.events]

-    return EventListResponse(events=event_responses, total=len(event_responses), limit=1000, skip=0, has_more=False)
+    return EventListResponse(
+        events=event_responses,
+        total=result.total,
+        limit=limit,
+        skip=skip,
+        has_more=result.has_more,
+    )


 @router.get("/user", response_model=EventListResponse)
@@ -68,7 +76,6 @@ async def get_user_events(
     sort_order: SortOrder = Query(SortOrder.DESC),
 ) -> EventListResponse:
     """Get events for the current user"""
-    mapper = EventMapper()
     result = await event_service.get_user_events_paginated(
         user_id=current_user.user_id,
         event_types=event_types,
@@ -79,7 +86,7 @@ async def get_user_events(
         sort_order=sort_order,
     )

-    event_responses = [EventResponse(**mapper.to_dict(event)) for event in result.events]
+    event_responses = [EventResponse.model_validate(event) for event in result.events]

     return EventListResponse(
         events=event_responses, total=result.total, limit=limit, skip=skip, has_more=result.has_more
@@ -92,7 +99,6 @@ async def query_events(
     filter_request: EventFilterRequest,
     event_service: FromDishka[EventService],
 ) -> EventListResponse:
-    mapper = EventMapper()
     event_filter = EventFilter(
         event_types=[str(et) for et in filter_request.event_types] if filter_request.event_types else None,
         aggregate_id=filter_request.aggregate_id,
@@ -116,7 +122,7 @@ async def query_events(
     if result is None:
         raise HTTPException(status_code=403, detail="Cannot query other users' events")

-    event_responses = [EventResponse(**mapper.to_dict(event)) for event in result.events]
+    event_responses = [EventResponse.model_validate(event) for event in result.events]

     return EventListResponse(
         events=event_responses, total=result.total, limit=result.limit, skip=result.skip, has_more=result.has_more
@@ -130,19 +136,26 @@ async def get_events_by_correlation(
     event_service: FromDishka[EventService],
     include_all_users: bool = Query(False, description="Include events from all users (admin only)"),
     limit: int = Query(100, ge=1, le=1000),
+    skip: int = Query(0, ge=0),
 ) -> EventListResponse:
-    mapper = EventMapper()
-    events = await event_service.get_events_by_correlation(
+    result = await event_service.get_events_by_correlation(
         correlation_id=correlation_id,
         user_id=current_user.user_id,
         user_role=current_user.role,
         include_all_users=include_all_users,
         limit=limit,
+        skip=skip,
     )

-    event_responses = [EventResponse(**mapper.to_dict(event)) for event in events]
+    event_responses = [EventResponse.model_validate(event) for event in result.events]

-    return EventListResponse(events=event_responses, total=len(event_responses), limit=limit, skip=0, has_more=False)
+    return EventListResponse(
+        events=event_responses,
+        total=result.total,
+        limit=limit,
+        skip=skip,
+        has_more=result.has_more,
+    )


 @router.get("/current-request", response_model=EventListResponse)
@@ -150,23 +163,30 @@ async def get_current_request_events(
     current_user: Annotated[UserResponse, Depends(current_user)],
     event_service: FromDishka[EventService],
     limit: int = Query(100, ge=1, le=1000),
+    skip: int = Query(0, ge=0),
 ) -> EventListResponse:
-    mapper = EventMapper()
     correlation_id = CorrelationContext.get_correlation_id()
     if not correlation_id:
-        return EventListResponse(events=[], total=0, limit=limit, skip=0, has_more=False)
+        return EventListResponse(events=[], total=0, limit=limit, skip=skip, has_more=False)

-    events = await event_service.get_events_by_correlation(
+    result = await event_service.get_events_by_correlation(
         correlation_id=correlation_id,
         user_id=current_user.user_id,
         user_role=current_user.role,
         include_all_users=False,
         limit=limit,
+        skip=skip,
     )

-    event_responses = [EventResponse(**mapper.to_dict(event)) for event in events]
+    event_responses = [EventResponse.model_validate(event) for event in result.events]

-    return EventListResponse(events=event_responses, total=len(event_responses), limit=limit, skip=0, has_more=False)
+    return EventListResponse(
+        events=event_responses,
+        total=result.total,
+        limit=limit,
+        skip=skip,
+        has_more=result.has_more,
+    )


 @router.get("/statistics", response_model=EventStatistics)
@@ -190,8 +210,7 @@ async def get_event_statistics(
         include_all_users=include_all_users,
     )

-    stats_mapper = EventStatisticsMapper()
-    return EventStatistics(**stats_mapper.to_dict(stats))
+    return EventStatistics.model_validate(stats)


 @router.get("/{event_id}", response_model=EventResponse)
@@ -199,11 +218,10 @@ async def get_event(
     event_id: str, current_user: Annotated[UserResponse, Depends(current_user)], event_service: FromDishka[EventService]
 ) -> EventResponse:
     """Get a specific event by ID"""
-    mapper = EventMapper()
     event = await event_service.get_event(event_id=event_id, user_id=current_user.user_id, user_role=current_user.role)
     if event is None:
         raise HTTPException(status_code=404, detail="Event not found")
-    return EventResponse(**mapper.to_dict(event))
+    return EventResponse.model_validate(event)
@router.post("/publish", response_model=PublishEventResponse) diff --git a/backend/app/api/routes/execution.py b/backend/app/api/routes/execution.py index d714a5f5..df1d40d4 100644 --- a/backend/app/api/routes/execution.py +++ b/backend/app/api/routes/execution.py @@ -14,8 +14,7 @@ from app.domain.enums.execution import ExecutionStatus from app.domain.enums.user import UserRole from app.infrastructure.kafka.events.base import BaseEvent -from app.infrastructure.kafka.events.metadata import EventMetadata -from app.infrastructure.mappers import ExecutionApiMapper +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.schemas_pydantic.execution import ( CancelExecutionRequest, CancelResponse, @@ -118,7 +117,7 @@ async def create_execution( # Store result for idempotency if key was provided if idempotency_key and pseudo_event: - response_model = ExecutionApiMapper.to_response(exec_result) + response_model = ExecutionResponse.model_validate(exec_result) await idempotency_manager.mark_completed_with_json( event=pseudo_event, cached_json=response_model.model_dump_json(), @@ -126,7 +125,7 @@ async def create_execution( custom_key=f"http:{current_user.user_id}:{idempotency_key}", ) - return ExecutionApiMapper.to_response(exec_result) + return ExecutionResponse.model_validate(exec_result) except IntegrationException as e: # Mark as failed for idempotency @@ -230,7 +229,7 @@ async def retry_execution( client_ip=client_ip, user_agent=user_agent, ) - return ExecutionApiMapper.to_response(new_result) + return ExecutionResponse.model_validate(new_result) @router.get("/executions/{execution_id}/events", response_model=list[ExecutionEventResponse]) @@ -274,7 +273,7 @@ async def get_user_executions( user_id=current_user.user_id, status=status, lang=lang, start_time=start_time, end_time=end_time ) - execution_results = [ExecutionApiMapper.to_result(e) for e in executions] + execution_results = [ExecutionResult.model_validate(e) for e in executions] return ExecutionListResponse( executions=execution_results, total=total_count, limit=limit, skip=skip, has_more=(skip + limit) < total_count diff --git a/backend/app/api/routes/notifications.py b/backend/app/api/routes/notifications.py index b8829cf1..8178f6d5 100644 --- a/backend/app/api/routes/notifications.py +++ b/backend/app/api/routes/notifications.py @@ -3,10 +3,10 @@ from fastapi import APIRouter, Query, Request, Response from app.domain.enums.notification import NotificationChannel, NotificationStatus -from app.infrastructure.mappers import NotificationApiMapper from app.schemas_pydantic.notification import ( DeleteNotificationResponse, NotificationListResponse, + NotificationResponse, NotificationSubscription, SubscriptionsResponse, SubscriptionUpdate, @@ -40,7 +40,11 @@ async def get_notifications( exclude_tags=exclude_tags, tag_prefix=tag_prefix, ) - return NotificationApiMapper.list_result_to_response(result) + return NotificationListResponse( + notifications=[NotificationResponse.model_validate(n) for n in result.notifications], + total=result.total, + unread_count=result.unread_count, + ) @router.put("/{notification_id}/read", status_code=204) @@ -72,7 +76,9 @@ async def get_subscriptions( ) -> SubscriptionsResponse: current_user = await auth_service.get_current_user(request) subscriptions_dict = await notification_service.get_subscriptions(current_user.user_id) - return NotificationApiMapper.subscriptions_dict_to_response(subscriptions_dict) + return SubscriptionsResponse( + 
+        subscriptions=[NotificationSubscription.model_validate(s) for s in subscriptions_dict.values()]
+    )


 @router.put("/subscriptions/{channel}", response_model=NotificationSubscription)
@@ -94,7 +100,7 @@ async def update_subscription(
         include_tags=subscription.include_tags,
         exclude_tags=subscription.exclude_tags,
     )
-    return NotificationApiMapper.subscription_to_pydantic(updated_sub)
+    return NotificationSubscription.model_validate(updated_sub)


 @router.get("/unread-count", response_model=UnreadCountResponse)
diff --git a/backend/app/api/routes/replay.py b/backend/app/api/routes/replay.py
index 628ef531..0d4c1a94 100644
--- a/backend/app/api/routes/replay.py
+++ b/backend/app/api/routes/replay.py
@@ -70,7 +70,7 @@ async def list_replay_sessions(
 @router.get("/sessions/{session_id}", response_model=ReplaySession)
 async def get_replay_session(session_id: str, service: FromDishka[ReplayService]) -> ReplaySession:
     state = service.get_session(session_id)
-    return ReplayApiMapper.session_to_response(state)
+    return ReplaySession.model_validate(state)


 @router.post("/cleanup", response_model=CleanupResponse)
diff --git a/backend/app/api/routes/saga.py b/backend/app/api/routes/saga.py
index 5d2e4a0c..40037a3c 100644
--- a/backend/app/api/routes/saga.py
+++ b/backend/app/api/routes/saga.py
@@ -3,7 +3,6 @@
 from fastapi import APIRouter, Query, Request

 from app.domain.enums.saga import SagaState
-from app.infrastructure.mappers import SagaResponseMapper
 from app.infrastructure.mappers import UserMapper as AdminUserMapper
 from app.schemas_pydantic.saga import (
     SagaCancellationResponse,
@@ -47,8 +46,7 @@ async def get_saga_status(
     service_user = User.from_response(current_user)
     domain_user = AdminUserMapper.from_pydantic_service_user(service_user)
     saga = await saga_service.get_saga_with_access_check(saga_id, domain_user)
-    mapper = SagaResponseMapper()
-    return mapper.to_response(saga)
+    return SagaStatusResponse.from_domain(saga)


 @router.get("/execution/{execution_id}", response_model=SagaListResponse)
@@ -58,6 +56,8 @@ async def get_execution_sagas(
     saga_service: FromDishka[SagaService],
     auth_service: FromDishka[AuthService],
     state: SagaState | None = Query(None, description="Filter by saga state"),
+    limit: int = Query(100, ge=1, le=1000),
+    skip: int = Query(0, ge=0),
 ) -> SagaListResponse:
     """Get all sagas for an execution.
@@ -67,9 +67,11 @@ async def get_execution_sagas(
         saga_service: Saga service from DI
         auth_service: Auth service from DI
         state: Optional state filter
+        limit: Maximum number of results
+        skip: Number of results to skip

     Returns:
-        List of sagas for the execution
+        Paginated list of sagas for the execution

     Raises:
         HTTPException: 403 if access denied
@@ -78,10 +80,15 @@ async def get_execution_sagas(
     service_user = User.from_response(current_user)
     domain_user = AdminUserMapper.from_pydantic_service_user(service_user)

-    sagas = await saga_service.get_execution_sagas(execution_id, domain_user, state)
-    mapper = SagaResponseMapper()
-    saga_responses = mapper.list_to_responses(sagas)
-    return SagaListResponse(sagas=saga_responses, total=len(saga_responses))
+    result = await saga_service.get_execution_sagas(execution_id, domain_user, state, limit=limit, skip=skip)
+    saga_responses = [SagaStatusResponse.from_domain(s) for s in result.sagas]
+    return SagaListResponse(
+        sagas=saga_responses,
+        total=result.total,
+        skip=skip,
+        limit=limit,
+        has_more=result.has_more,
+    )


 @router.get("/", response_model=SagaListResponse)
@@ -91,7 +98,7 @@ async def list_sagas(
     auth_service: FromDishka[AuthService],
     state: SagaState | None = Query(None, description="Filter by saga state"),
     limit: int = Query(100, ge=1, le=1000),
-    offset: int = Query(0, ge=0),
+    skip: int = Query(0, ge=0),
 ) -> SagaListResponse:
     """List sagas accessible by the current user.
@@ -101,7 +108,7 @@ async def list_sagas(
         auth_service: Auth service from DI
         state: Optional state filter
         limit: Maximum number of results
-        offset: Number of results to skip
+        skip: Number of results to skip

     Returns:
         Paginated list of sagas
@@ -110,10 +117,15 @@
     service_user = User.from_response(current_user)
     domain_user = AdminUserMapper.from_pydantic_service_user(service_user)

-    result = await saga_service.list_user_sagas(domain_user, state, limit, offset)
-    mapper = SagaResponseMapper()
-    saga_responses = mapper.list_to_responses(result.sagas)
-    return SagaListResponse(sagas=saga_responses, total=result.total)
+    result = await saga_service.list_user_sagas(domain_user, state, limit, skip)
+    saga_responses = [SagaStatusResponse.from_domain(s) for s in result.sagas]
+    return SagaListResponse(
+        sagas=saga_responses,
+        total=result.total,
+        skip=skip,
+        limit=limit,
+        has_more=result.has_more,
+    )


 @router.post("/{saga_id}/cancel", response_model=SagaCancellationResponse)
diff --git a/backend/app/api/routes/saved_scripts.py b/backend/app/api/routes/saved_scripts.py
index 12aff22e..e5cc97e9 100644
--- a/backend/app/api/routes/saved_scripts.py
+++ b/backend/app/api/routes/saved_scripts.py
@@ -2,10 +2,11 @@
 from dishka.integrations.fastapi import DishkaRoute
 from fastapi import APIRouter, Request

-from app.infrastructure.mappers import SavedScriptApiMapper
+from app.domain.saved_script import DomainSavedScriptCreate, DomainSavedScriptUpdate
 from app.schemas_pydantic.saved_script import (
     SavedScriptCreateRequest,
     SavedScriptResponse,
+    SavedScriptUpdate,
 )
 from app.services.auth_service import AuthService
 from app.services.saved_script_service import SavedScriptService
@@ -21,9 +22,9 @@ async def create_saved_script(
     auth_service: FromDishka[AuthService],
 ) -> SavedScriptResponse:
     current_user = await auth_service.get_current_user(request)
-    create = SavedScriptApiMapper.request_to_create(saved_script)
+    create = DomainSavedScriptCreate(**saved_script.model_dump())
     domain = await saved_script_service.create_saved_script(create, current_user.user_id)
-    return SavedScriptApiMapper.to_response(domain)
+    return SavedScriptResponse.model_validate(domain)


 @router.get("/scripts", response_model=list[SavedScriptResponse])
@@ -34,7 +35,7 @@ async def list_saved_scripts(
 ) -> list[SavedScriptResponse]:
     current_user = await auth_service.get_current_user(request)
     items = await saved_script_service.list_saved_scripts(current_user.user_id)
-    return SavedScriptApiMapper.list_to_response(items)
+    return [SavedScriptResponse.model_validate(item) for item in items]


 @router.get("/scripts/{script_id}", response_model=SavedScriptResponse)
@@ -47,22 +48,22 @@ async def get_saved_script(
     current_user = await auth_service.get_current_user(request)

     domain = await saved_script_service.get_saved_script(script_id, current_user.user_id)
-    return SavedScriptApiMapper.to_response(domain)
+    return SavedScriptResponse.model_validate(domain)


 @router.put("/scripts/{script_id}", response_model=SavedScriptResponse)
 async def update_saved_script(
     request: Request,
     script_id: str,
-    script_update: SavedScriptCreateRequest,
+    script_update: SavedScriptUpdate,
     saved_script_service: FromDishka[SavedScriptService],
     auth_service: FromDishka[AuthService],
 ) -> SavedScriptResponse:
     current_user = await auth_service.get_current_user(request)
-    update_data = SavedScriptApiMapper.request_to_update(script_update)
+    update_data = DomainSavedScriptUpdate(**script_update.model_dump())
     domain = await saved_script_service.update_saved_script(script_id, current_user.user_id, update_data)
-    return SavedScriptApiMapper.to_response(domain)
+    return SavedScriptResponse.model_validate(domain)


 @router.delete("/scripts/{script_id}", status_code=204)
diff --git a/backend/app/api/routes/user_settings.py b/backend/app/api/routes/user_settings.py
index e1ebf295..6ba9f430 100644
--- a/backend/app/api/routes/user_settings.py
+++ b/backend/app/api/routes/user_settings.py
@@ -5,12 +5,17 @@
 from fastapi import APIRouter, Depends

 from app.api.dependencies import current_user
-from app.infrastructure.mappers import UserSettingsApiMapper
+from app.domain.user.settings_models import (
+    DomainEditorSettings,
+    DomainNotificationSettings,
+    DomainUserSettingsUpdate,
+)
 from app.schemas_pydantic.user import UserResponse
 from app.schemas_pydantic.user_settings import (
     EditorSettings,
     NotificationSettings,
     RestoreSettingsRequest,
+    SettingsHistoryEntry,
     SettingsHistoryResponse,
     ThemeUpdateRequest,
     UserSettings,
@@ -27,7 +32,7 @@ async def get_user_settings(
     settings_service: FromDishka[UserSettingsService],
 ) -> UserSettings:
     domain = await settings_service.get_user_settings(current_user.user_id)
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)


 @router.put("/", response_model=UserSettings)
@@ -36,9 +41,19 @@ async def update_user_settings(
     updates: UserSettingsUpdate,
     settings_service: FromDishka[UserSettingsService],
 ) -> UserSettings:
-    domain_updates = UserSettingsApiMapper.to_domain_update(updates)
+    domain_updates = DomainUserSettingsUpdate(
+        theme=updates.theme,
+        timezone=updates.timezone,
+        date_format=updates.date_format,
+        time_format=updates.time_format,
+        notifications=(
+            DomainNotificationSettings(**updates.notifications.model_dump()) if updates.notifications else None
+        ),
+        editor=DomainEditorSettings(**updates.editor.model_dump()) if updates.editor else None,
+        custom_settings=updates.custom_settings,
+    )
     domain = await settings_service.update_user_settings(current_user.user_id, domain_updates)
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)


 @router.put("/theme", response_model=UserSettings)
@@ -48,7 +63,7 @@ async def update_theme(
     settings_service: FromDishka[UserSettingsService],
 ) -> UserSettings:
     domain = await settings_service.update_theme(current_user.user_id, update_request.theme)
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)


 @router.put("/notifications", response_model=UserSettings)
@@ -59,9 +74,9 @@ async def update_notification_settings(
 ) -> UserSettings:
     domain = await settings_service.update_notification_settings(
         current_user.user_id,
-        UserSettingsApiMapper._to_domain_notifications(notifications),
+        DomainNotificationSettings(**notifications.model_dump()),
     )
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)


 @router.put("/editor", response_model=UserSettings)
@@ -72,9 +87,9 @@ async def update_editor_settings(
 ) -> UserSettings:
     domain = await settings_service.update_editor_settings(
         current_user.user_id,
-        UserSettingsApiMapper._to_domain_editor(editor),
+        DomainEditorSettings(**editor.model_dump()),
     )
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)


 @router.get("/history", response_model=SettingsHistoryResponse)
@@ -84,7 +99,8 @@ async def get_settings_history(
     limit: int = 50,
 ) -> SettingsHistoryResponse:
     history = await settings_service.get_settings_history(current_user.user_id, limit=limit)
-    return UserSettingsApiMapper.history_to_api(history)
+    entries = [SettingsHistoryEntry.model_validate(entry) for entry in history]
+    return SettingsHistoryResponse(history=entries, limit=limit)


 @router.post("/restore", response_model=UserSettings)
@@ -94,7 +110,7 @@ async def restore_settings(
     settings_service: FromDishka[UserSettingsService],
 ) -> UserSettings:
     domain = await settings_service.restore_settings_to_point(current_user.user_id, restore_request.timestamp)
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)


 @router.put("/custom/{key}")
@@ -105,4 +121,4 @@ async def update_custom_setting(
     settings_service: FromDishka[UserSettingsService],
 ) -> UserSettings:
     domain = await settings_service.update_custom_setting(current_user.user_id, key, value)
-    return UserSettingsApiMapper.to_api_settings(domain)
+    return UserSettings.model_validate(domain)
diff --git a/backend/app/db/repositories/admin/admin_events_repository.py b/backend/app/db/repositories/admin/admin_events_repository.py
index 0ba68df4..a7aa3b00 100644
--- a/backend/app/db/repositories/admin/admin_events_repository.py
+++ b/backend/app/db/repositories/admin/admin_events_repository.py
@@ -353,20 +353,11 @@ async def count_events_for_replay(self, query: Dict[str, Any]) -> int:
         """Count events matching replay query."""
         return await self.events_collection.count_documents(query)

-    async def get_events_preview_for_replay(self, query: Dict[str, Any], limit: int = 100) -> List[Dict[str, Any]]:
+    async def get_events_preview_for_replay(self, query: Dict[str, Any], limit: int = 100) -> List[EventSummary]:
         """Get preview of events for replay."""
         cursor = self.events_collection.find(query).limit(limit)
         event_docs = await cursor.to_list(length=limit)
-
-        # Convert to event summaries
-        summaries: List[Dict[str, Any]] = []
-        for doc in event_docs:
-            summary = self.summary_mapper.from_mongo_document(doc)
-            summary_dict = self.summary_mapper.to_dict(summary)
-            # Convert EventFields enum keys to strings
-            summaries.append({str(k): v for k, v in summary_dict.items()})
-
-        return summaries
+        return [self.summary_mapper.from_mongo_document(doc) for doc in event_docs]

     def build_replay_query(self, replay_query: ReplayQuery) -> Dict[str, Any]:
         """Build MongoDB query from replay query model."""
@@ -385,8 +376,7 @@ async def prepare_replay_session(
         # Get events preview for dry run
         events_preview: List[EventSummary] = []
         if dry_run:
-            preview_docs = await self.get_events_preview_for_replay(query, limit=100)
-            events_preview = [self.summary_mapper.from_mongo_document(e) for e in preview_docs]
+            events_preview = await self.get_events_preview_for_replay(query, limit=100)

         # Return unified session data
         session_data = ReplaySessionData(
diff --git a/backend/app/db/repositories/event_repository.py b/backend/app/db/repositories/event_repository.py
index 82b2bb20..a6b673fc 100644
--- a/backend/app/db/repositories/event_repository.py
+++ b/backend/app/db/repositories/event_repository.py
@@ -125,14 +125,19 @@ async def get_events_by_aggregate(
         docs = await cursor.to_list(length=limit)
         return [self.mapper.from_mongo_document(doc) for doc in docs]

-    async def get_events_by_correlation(self, correlation_id: str, limit: int = 100) -> list[Event]:
-        cursor = (
-            self._collection.find({EventFields.METADATA_CORRELATION_ID: correlation_id})
-            .sort(EventFields.TIMESTAMP, ASCENDING)
-            .limit(limit)
-        )
+    async def get_events_by_correlation(self, correlation_id: str, limit: int = 100, skip: int = 0) -> EventListResult:
+        query: dict[str, Any] = {EventFields.METADATA_CORRELATION_ID: correlation_id}
+        total_count = await self._collection.count_documents(query)
+
+        cursor = self._collection.find(query).sort(EventFields.TIMESTAMP, ASCENDING).skip(skip).limit(limit)
         docs = await cursor.to_list(length=limit)
-        return [self.mapper.from_mongo_document(doc) for doc in docs]
+        return EventListResult(
+            events=[self.mapper.from_mongo_document(doc) for doc in docs],
+            total=total_count,
+            skip=skip,
+            limit=limit,
+            has_more=(skip + limit) < total_count,
+        )

     async def get_events_by_user(
         self,
@@ -154,12 +159,28 @@ async def get_events_by_user(
         docs = await cursor.to_list(length=limit)
         return [self.mapper.from_mongo_document(doc) for doc in docs]

-    async def get_execution_events(self, execution_id: str, limit: int = 100) -> list[Event]:
-        query = {"$or": [{EventFields.PAYLOAD_EXECUTION_ID: execution_id}, {EventFields.AGGREGATE_ID: execution_id}]}
+    async def get_execution_events(
+        self, execution_id: str, limit: int = 100, skip: int = 0, exclude_system_events: bool = False
+    ) -> EventListResult:
+        query: dict[str, Any] = {
+            "$or": [{EventFields.PAYLOAD_EXECUTION_ID: execution_id}, {EventFields.AGGREGATE_ID: execution_id}]
+        }
+
+        # Filter out system events at DB level for accurate pagination
+        if exclude_system_events:
+            query[EventFields.METADATA_SERVICE_NAME] = {"$not": {"$regex": "^system-"}}

-        cursor = self._collection.find(query).sort(EventFields.TIMESTAMP, ASCENDING).limit(limit)
+        total_count = await self._collection.count_documents(query)
+
+        cursor = self._collection.find(query).sort(EventFields.TIMESTAMP, ASCENDING).skip(skip).limit(limit)
         docs = await cursor.to_list(length=limit)
-        return [self.mapper.from_mongo_document(doc) for doc in docs]
+        return EventListResult(
+            events=[self.mapper.from_mongo_document(doc) for doc in docs],
+            total=total_count,
+            skip=skip,
+            limit=limit,
+            has_more=(skip + limit) < total_count,
+        )

     async def search_events(
         self, text_query: str, filters: dict[str, object] | None = None, limit: int = 100, skip: int = 0
diff --git a/backend/app/db/repositories/saga_repository.py b/backend/app/db/repositories/saga_repository.py
index eb82be2b..477b3c4e 100644
--- a/backend/app/db/repositories/saga_repository.py
+++ b/backend/app/db/repositories/saga_repository.py
@@ -46,14 +46,19 @@ async def get_saga(self, saga_id: str) -> Saga | None:
         doc = await self.sagas.find_one({"saga_id": saga_id})
         return self.mapper.from_mongo(doc) if doc else None

-    async def get_sagas_by_execution(self, execution_id: str, state: SagaState | None = None) -> list[Saga]:
+    async def get_sagas_by_execution(
+        self, execution_id: str, state: SagaState | None = None, limit: int = 100, skip: int = 0
+    ) -> SagaListResult:
         query: dict[str, object] = {"execution_id": execution_id}
         if state:
             query["state"] = state.value

-        cursor = self.sagas.find(query).sort("created_at", DESCENDING)
-        docs = await cursor.to_list(length=None)
-        return [self.mapper.from_mongo(doc) for doc in docs]
+        total = await self.sagas.count_documents(query)
+        cursor = self.sagas.find(query).sort("created_at", DESCENDING).skip(skip).limit(limit)
+        docs = await cursor.to_list(length=limit)
+        sagas = [self.mapper.from_mongo(doc) for doc in docs]
+
+        return SagaListResult(sagas=sagas, total=total, skip=skip, limit=limit)

     async def list_sagas(self, saga_filter: SagaFilter, limit: int = 100, skip: int = 0) -> SagaListResult:
         query = self.filter_mapper.to_mongodb_query(saga_filter)
diff --git a/backend/app/db/repositories/user_settings_repository.py b/backend/app/db/repositories/user_settings_repository.py
index bca4f614..f91f50b9 100644
--- a/backend/app/db/repositories/user_settings_repository.py
+++ b/backend/app/db/repositories/user_settings_repository.py
@@ -59,7 +59,10 @@ async def get_settings_events(
         until: datetime | None = None,
         limit: int | None = None,
     ) -> List[DomainSettingsEvent]:
-        query = {"aggregate_id": f"user_settings_{user_id}", "event_type": {"$in": [str(et) for et in event_types]}}
+        query: Dict[str, Any] = {
+            "aggregate_id": f"user_settings_{user_id}",
+            "event_type": {"$in": [str(et) for et in event_types]},
+        }

         if since or until:
             timestamp_query: Dict[str, Any] = {}
diff --git a/backend/app/domain/events/__init__.py b/backend/app/domain/events/__init__.py
index c2c7d4d6..d96d26bf 100644
--- a/backend/app/domain/events/__init__.py
+++ b/backend/app/domain/events/__init__.py
@@ -1,3 +1,4 @@
+from app.domain.events.event_metadata import EventMetadata
 from app.domain.events.event_models import (
     ArchivedEvent,
     Event,
@@ -12,7 +13,6 @@
     EventStatistics,
     ExecutionEventsResult,
 )
-from app.infrastructure.kafka.events.metadata import EventMetadata

 __all__ = [
     "ArchivedEvent",
diff --git a/backend/app/domain/events/event_metadata.py b/backend/app/domain/events/event_metadata.py
new file mode 100644
index 00000000..ad44c8ed
--- /dev/null
+++ b/backend/app/domain/events/event_metadata.py
@@ -0,0 +1,47 @@
+from dataclasses import asdict, dataclass, field, replace
+from typing import Any
+from uuid import uuid4
+
+from app.domain.enums.common import Environment
+
+
+@dataclass
+class EventMetadata:
+    """Domain event metadata for auditing and tracing."""
+
+    service_name: str
+    service_version: str
+    correlation_id: str = field(default_factory=lambda: str(uuid4()))
+    user_id: str | None = None
+    ip_address: str | None = None
+    user_agent: str | None = None
+    environment: Environment = Environment.PRODUCTION
+
+    def to_dict(self, exclude_none: bool = True) -> dict[str, Any]:
+        result = asdict(self)
+        if isinstance(result.get("environment"), Environment):
+            result["environment"] = result["environment"].value
+        if exclude_none:
+            return {k: v for k, v in result.items() if v is not None}
+        return result
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "EventMetadata":
+        env = data.get("environment", Environment.PRODUCTION)
+        if isinstance(env, str):
+            env = Environment(env)
+        return cls(
+            service_name=data.get("service_name", "unknown"),
+            service_version=data.get("service_version", "1.0"),
+            correlation_id=data.get("correlation_id", str(uuid4())),
+            user_id=data.get("user_id"),
+            ip_address=data.get("ip_address"),
+            user_agent=data.get("user_agent"),
+            environment=env,
+        )
+
+    def with_correlation(self, correlation_id: str) -> "EventMetadata":
+        return replace(self, correlation_id=correlation_id)
+
+    def with_user(self, user_id: str) -> "EventMetadata":
+        return replace(self, user_id=user_id)
diff --git a/backend/app/domain/events/event_models.py b/backend/app/domain/events/event_models.py
index 35dbc6f6..4c87c332 100644
--- a/backend/app/domain/events/event_models.py
+++ b/backend/app/domain/events/event_models.py
@@ -4,7 +4,7 @@
 from app.core.utils import StringEnum
 from app.domain.enums.events import EventType
-from app.infrastructure.kafka.events.metadata import EventMetadata
+from app.domain.events.event_metadata import EventMetadata

 MongoQueryValue = str | dict[str, str | list[str] | float | datetime]
 MongoQuery = dict[str, MongoQueryValue]
diff --git a/backend/app/events/core/producer.py b/backend/app/events/core/producer.py
index ab5241b1..b174d1e2 100644
--- a/backend/app/events/core/producer.py
+++ b/backend/app/events/core/producer.py
@@ -1,6 +1,7 @@
 import asyncio
 import json
 import socket
+import threading
 from datetime import datetime, timezone
 from typing import Any, Callable, TypeAlias
@@ -18,6 +19,10 @@
 from .types import ProducerConfig, ProducerMetrics, ProducerState

+# Global lock to serialize Producer initialization (workaround for librdkafka race condition)
+# See: https://github.com/confluentinc/confluent-kafka-python/issues/1797
+_producer_init_lock = threading.Lock()
+
 DeliveryCallback: TypeAlias = Callable[[KafkaError | None, Message], None]
 StatsCallback: TypeAlias = Callable[[dict[str, Any]], None]
@@ -113,7 +118,9 @@ async def start(self) -> None:
             producer_config["stats_cb"] = self._handle_stats
             producer_config["statistics.interval.ms"] = 30000

-        self._producer = Producer(producer_config)
+        # Serialize Producer initialization to prevent librdkafka race condition
+        with _producer_init_lock:
+            self._producer = Producer(producer_config)
         self._running = True
         self._poll_task = asyncio.create_task(self._poll_loop())
         self._state = ProducerState.RUNNING
diff --git a/backend/app/events/metadata.py b/backend/app/events/metadata.py
deleted file mode 100644
index f73470a4..00000000
--- a/backend/app/events/metadata.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import Any, Dict
-from uuid import uuid4
-
-from pydantic import ConfigDict, Field
-from pydantic_avro import AvroBase  # type: ignore[attr-defined]
-
-from app.domain.enums.common import Environment
-
-
-class EventMetadata(AvroBase):
-    """Unified event metadata for auditing and tracing."""
-
-    service_name: str
-    service_version: str
-    correlation_id: str = Field(default_factory=lambda: str(uuid4()))
-    user_id: str | None = None
-    ip_address: str | None = None
-    user_agent: str | None = None
-    environment: Environment = Environment.PRODUCTION
-
-    model_config = ConfigDict(extra="allow", str_strip_whitespace=True, use_enum_values=True)
-
-    def to_dict(self, exclude_none: bool = True) -> Dict[str, Any]:
-        return self.model_dump(exclude_none=exclude_none)
-
-    @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> "EventMetadata":
-        return cls(
-            service_name=data.get("service_name", "unknown"),
-            service_version=data.get("service_version", "1.0"),
-            correlation_id=data.get("correlation_id", str(uuid4())),
-            user_id=data.get("user_id"),
-            ip_address=data.get("ip_address"),
-            user_agent=data.get("user_agent"),
-            environment=data.get("environment", Environment.PRODUCTION),
-        )
-
-    def with_correlation(self, correlation_id: str) -> "EventMetadata":
-        return self.model_copy(update={"correlation_id": correlation_id})
-
-    def with_user(self, user_id: str) -> "EventMetadata":
-        return self.model_copy(update={"user_id": user_id})
-
-    def ensure_correlation_id(self) -> "EventMetadata":
-        if self.correlation_id:
-            return self
-        return self.model_copy(update={"correlation_id": str(uuid4())})
diff --git a/backend/app/infrastructure/kafka/__init__.py b/backend/app/infrastructure/kafka/__init__.py
index e9d26a9f..df6d0ee2 100644
--- a/backend/app/infrastructure/kafka/__init__.py
+++ b/backend/app/infrastructure/kafka/__init__.py
@@ -1,13 +1,13 @@
 """Kafka infrastructure for event-driven architecture."""

 from app.infrastructure.kafka.events.base import BaseEvent
-from app.infrastructure.kafka.events.metadata import EventMetadata
+from app.infrastructure.kafka.events.metadata import AvroEventMetadata
 from app.infrastructure.kafka.mappings import get_event_class_for_type, get_topic_for_event
 from app.infrastructure.kafka.topics import get_all_topics, get_topic_configs

 __all__ = [
     "BaseEvent",
-    "EventMetadata",
+    "AvroEventMetadata",
     "get_all_topics",
     "get_topic_configs",
     "get_event_class_for_type",
diff --git a/backend/app/infrastructure/kafka/events/__init__.py b/backend/app/infrastructure/kafka/events/__init__.py
index 6954a4a6..f06fb640 100644
--- a/backend/app/infrastructure/kafka/events/__init__.py
+++ b/backend/app/infrastructure/kafka/events/__init__.py
@@ -10,7 +10,7 @@
     ExecutionStartedEvent,
     ExecutionTimeoutEvent,
 )
-from app.infrastructure.kafka.events.metadata import EventMetadata
+from app.infrastructure.kafka.events.metadata import AvroEventMetadata
 from app.infrastructure.kafka.events.notification import (
     NotificationClickedEvent,
     NotificationCreatedEvent,
@@ -70,7 +70,7 @@
 __all__ = [
     # Base
     "BaseEvent",
-    "EventMetadata",
+    "AvroEventMetadata",
     # Execution
     "ExecutionRequestedEvent",
     "ExecutionAcceptedEvent",
diff --git a/backend/app/infrastructure/kafka/events/base.py b/backend/app/infrastructure/kafka/events/base.py
index e48e75ec..6af405d5 100644
--- a/backend/app/infrastructure/kafka/events/base.py
+++ b/backend/app/infrastructure/kafka/events/base.py
@@ -7,7 +7,7 @@
 from app.domain.enums.events import EventType
 from app.domain.enums.kafka import KafkaTopic
-from app.infrastructure.kafka.events.metadata import EventMetadata
+from app.infrastructure.kafka.events.metadata import AvroEventMetadata


 class BaseEvent(AvroBase):
@@ -18,7 +18,7 @@ class BaseEvent(AvroBase):
     event_version: str = "1.0"
     timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
     aggregate_id: str | None = None
-    metadata: EventMetadata
+    metadata: AvroEventMetadata

     # Each subclass must define its topic
     topic: ClassVar[KafkaTopic]
diff --git a/backend/app/infrastructure/kafka/events/metadata.py b/backend/app/infrastructure/kafka/events/metadata.py
index f73470a4..23805032 100644
--- a/backend/app/infrastructure/kafka/events/metadata.py
+++ b/backend/app/infrastructure/kafka/events/metadata.py
@@ -7,7 +7,7 @@
 from app.domain.enums.common import Environment


-class EventMetadata(AvroBase):
+class AvroEventMetadata(AvroBase):
     """Unified event metadata for auditing and tracing."""

     service_name: str
@@ -24,7 +24,7 @@ def to_dict(self, exclude_none: bool = True) -> Dict[str, Any]:
         return self.model_dump(exclude_none=exclude_none)

     @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> "EventMetadata":
+    def from_dict(cls, data: Dict[str, Any]) -> "AvroEventMetadata":
         return cls(
             service_name=data.get("service_name", "unknown"),
             service_version=data.get("service_version", "1.0"),
@@ -35,13 +35,13 @@ def from_dict...
             environment=data.get("environment", Environment.PRODUCTION),
         )

-    def with_correlation(self, correlation_id: str) -> "EventMetadata":
+    def with_correlation(self, correlation_id: str) -> "AvroEventMetadata":
         return self.model_copy(update={"correlation_id": correlation_id})

-    def with_user(self, user_id: str) -> "EventMetadata":
+    def with_user(self, user_id: str) -> "AvroEventMetadata":
         return self.model_copy(update={"user_id": user_id})

-    def ensure_correlation_id(self) -> "EventMetadata":
+    def ensure_correlation_id(self) -> "AvroEventMetadata":
         if self.correlation_id:
             return self
         return self.model_copy(update={"correlation_id": str(uuid4())})
diff --git a/backend/app/infrastructure/mappers/__init__.py b/backend/app/infrastructure/mappers/__init__.py
index ce001bc0..7dcd86e4 100644
--- a/backend/app/infrastructure/mappers/__init__.py
+++ b/backend/app/infrastructure/mappers/__init__.py
@@ -1,101 +1,68 @@
 from .admin_mapper import (
     AuditLogMapper,
     SettingsMapper,
-    UserListResultMapper,
     UserMapper,
 )
-from .admin_overview_api_mapper import AdminOverviewApiMapper
 from .event_mapper import (
     ArchivedEventMapper,
-    EventBrowseResultMapper,
-    EventDetailMapper,
     EventExportRowMapper,
     EventFilterMapper,
-    EventListResultMapper,
     EventMapper,
-    EventProjectionMapper,
-    EventReplayInfoMapper,
-    EventStatisticsMapper,
     EventSummaryMapper,
 )
-from .execution_api_mapper import ExecutionApiMapper
-from .notification_api_mapper import NotificationApiMapper
 from .notification_mapper import NotificationMapper
 from .rate_limit_mapper import (
     RateLimitConfigMapper,
     RateLimitRuleMapper,
-    RateLimitStatusMapper,
     UserRateLimitMapper,
 )
 from .replay_api_mapper import ReplayApiMapper
 from .replay_mapper import ReplayApiMapper as AdminReplayApiMapper
 from .replay_mapper import (
     ReplayQueryMapper,
-    ReplaySessionDataMapper,
     ReplaySessionMapper,
     ReplayStateMapper,
 )
 from .saga_mapper import (
-    SagaEventMapper,
     SagaFilterMapper,
     SagaInstanceMapper,
     SagaMapper,
-    SagaResponseMapper,
 )
-from .saved_script_api_mapper import SavedScriptApiMapper
 from .saved_script_mapper import SavedScriptMapper
 from .sse_mapper import SSEMapper
-from .user_settings_api_mapper import UserSettingsApiMapper
 from .user_settings_mapper import UserSettingsMapper

 __all__ = [
     # Admin
     "UserMapper",
-    "UserListResultMapper",
     "SettingsMapper",
     "AuditLogMapper",
-    "AdminOverviewApiMapper",
     # Events
     "EventMapper",
     "EventSummaryMapper",
-    "EventDetailMapper",
-    "EventListResultMapper",
-    "EventBrowseResultMapper",
-    "EventStatisticsMapper",
-    "EventProjectionMapper",
     "ArchivedEventMapper",
     "EventExportRowMapper",
     "EventFilterMapper",
-    "EventReplayInfoMapper",
-    # Execution
-    "ExecutionApiMapper",
     # Notification
-    "NotificationApiMapper",
     "NotificationMapper",
     # Rate limit
     "RateLimitRuleMapper",
     "UserRateLimitMapper",
     "RateLimitConfigMapper",
-    "RateLimitStatusMapper",
     # Replay
     "ReplayApiMapper",
     "AdminReplayApiMapper",
     "ReplaySessionMapper",
     "ReplayQueryMapper",
-    "ReplaySessionDataMapper",
     "ReplayStateMapper",
     # Saved scripts
-    "SavedScriptApiMapper",
     "SavedScriptMapper",
     # SSE
     "SSEMapper",
     # User settings
-    "UserSettingsApiMapper",
     "UserSettingsMapper",
     # Saga
     "SagaMapper",
     "SagaFilterMapper",
-    "SagaResponseMapper",
-    "SagaEventMapper",
     "SagaInstanceMapper",
 ]
diff --git a/backend/app/infrastructure/mappers/admin_mapper.py b/backend/app/infrastructure/mappers/admin_mapper.py
index fa192b16..0800048e 100644
--- a/backend/app/infrastructure/mappers/admin_mapper.py
+++ b/backend/app/infrastructure/mappers/admin_mapper.py
@@ -19,7 +19,6 @@
 from app.domain.user import (
     UserCreation,
     UserFields,
-    UserListResult,
     UserRole,
     UserSearchFilter,
     UserUpdate,
@@ -67,22 +66,6 @@ def from_mongo_document(data: Dict[str, Any]) -> DomainAdminUser:
             updated_at=data.get(UserFields.UPDATED_AT, datetime.now(timezone.utc)),
         )

-    @staticmethod
-    def to_response_dict(user: DomainAdminUser) -> Dict[str, Any]:
-        created_at_ts = user.created_at.timestamp() if user.created_at else 0.0
-        updated_at_ts = user.updated_at.timestamp() if user.updated_at else 0.0
-
-        return {
-            "user_id": user.user_id,
-            "username": user.username,
-            "email": user.email,
-            "role": user.role.value,
-            "is_active": user.is_active,
-            "is_superuser": user.is_superuser,
-            "created_at": created_at_ts,
-            "updated_at": updated_at_ts,
-        }
-
     @staticmethod
     def from_pydantic_service_user(user: ServiceUser) -> DomainAdminUser:
         """Convert internal service Pydantic user to domain admin user."""
@@ -140,18 +123,6 @@ def user_creation_to_dict(creation: UserCreation) -> Dict[str, Any]:
         }


-class UserListResultMapper:
-    @staticmethod
-    def to_dict(result: UserListResult) -> Dict[str, Any]:
-        user_mapper = UserMapper()
-        return {
-            "users": [user_mapper.to_response_dict(user) for user in result.users],
-            "total": result.total,
-            "offset": result.offset,
-            "limit": result.limit,
-        }
-
-
 class SettingsMapper:
     @staticmethod
     def execution_limits_to_dict(limits: ExecutionLimits) -> dict[str, int]:
diff --git a/backend/app/infrastructure/mappers/admin_overview_api_mapper.py b/backend/app/infrastructure/mappers/admin_overview_api_mapper.py
deleted file mode 100644
index a624ad84..00000000
--- a/backend/app/infrastructure/mappers/admin_overview_api_mapper.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Dict, List
-
-from app.domain.admin import AdminUserOverviewDomain
-from app.schemas_pydantic.admin_user_overview import (
-    AdminUserOverview,
-    DerivedCounts,
-    RateLimitSummary,
-)
-from app.schemas_pydantic.events import EventStatistics as EventStatisticsSchema
-from app.schemas_pydantic.user import UserResponse
-
-from .admin_mapper import UserMapper
-from .event_mapper import EventMapper, EventStatisticsMapper
-
-
-class AdminOverviewApiMapper:
-    def __init__(self) -> None:
-        self._user_mapper = UserMapper()
-        self._event_mapper = EventMapper()
-        self._stats_mapper = EventStatisticsMapper()
-
-    def to_response(self, d: AdminUserOverviewDomain) -> AdminUserOverview:
-        user_resp = UserResponse(**self._user_mapper.to_response_dict(d.user))
-        stats_dict = self._stats_mapper.to_dict(d.stats)
-        stats_schema = EventStatisticsSchema(**stats_dict)
-        derived = DerivedCounts(
-            succeeded=d.derived_counts.succeeded,
-            failed=d.derived_counts.failed,
-            timeout=d.derived_counts.timeout,
-            cancelled=d.derived_counts.cancelled,
-            terminal_total=d.derived_counts.terminal_total,
-        )
-        rl = RateLimitSummary(
-            bypass_rate_limit=d.rate_limit_summary.bypass_rate_limit,
-            global_multiplier=d.rate_limit_summary.global_multiplier,
-            has_custom_limits=d.rate_limit_summary.has_custom_limits,
-        )
-        recent_events: List[Dict[str, Any]] = [self._event_mapper.to_dict(e) for e in d.recent_events]
-        return AdminUserOverview(
-            user=user_resp,
-            stats=stats_schema,
-            derived_counts=derived,
-            rate_limit_summary=rl,
-            recent_events=recent_events,
-        )
diff --git a/backend/app/infrastructure/mappers/dlq_mapper.py b/backend/app/infrastructure/mappers/dlq_mapper.py
index 9d1d20eb..ed9db45e 100644
--- a/backend/app/infrastructure/mappers/dlq_mapper.py
+++ b/backend/app/infrastructure/mappers/dlq_mapper.py
@@ -7,13 +7,11 @@
 from confluent_kafka import Message

 from app.dlq.models import (
-    DLQBatchRetryResult,
     DLQFields,
     DLQMessage,
     DLQMessageFilter,
     DLQMessageStatus,
     DLQMessageUpdate,
-    DLQRetryResult,
 )
 from app.events.schema.schema_registry import SchemaRegistryManager
 from app.infrastructure.kafka.events import BaseEvent
@@ -147,44 +145,6 @@ def from_kafka_message(message: Message, schema_registry: SchemaRegistryManager)
             dlq_partition=partition if partition >= 0 else None,
         )

-    @staticmethod
-    def to_response_dict(message: DLQMessage) -> dict[str, object]:
-        return {
-            "event_id": message.event_id,
-            "event_type": message.event_type,
-            "event": message.event.to_dict(),
-            "original_topic": message.original_topic,
-            "error": message.error,
-            "retry_count": message.retry_count,
-            "failed_at": message.failed_at,
-            "status": message.status,
-            "age_seconds": message.age_seconds,
-            "producer_id": message.producer_id,
-            "dlq_offset": message.dlq_offset,
-            "dlq_partition": message.dlq_partition,
-            "last_error": message.last_error,
-            "next_retry_at": message.next_retry_at,
-            "retried_at": message.retried_at,
-            "discarded_at": message.discarded_at,
-            "discard_reason": message.discard_reason,
-        }
-
-    @staticmethod
-    def retry_result_to_dict(result: DLQRetryResult) -> dict[str, object]:
-        d: dict[str, object] = {"event_id": result.event_id, "status": result.status}
-        if result.error:
-            d["error"] = result.error
-        return d
-
-    @staticmethod
-    def batch_retry_result_to_dict(result: DLQBatchRetryResult) -> dict[str, object]:
-        return {
-            "total": result.total,
-            "successful": result.successful,
-            "failed": result.failed,
-            "details": [DLQMapper.retry_result_to_dict(d) for d in result.details],
-        }
-
     # Domain construction and updates
     @staticmethod
     def from_failed_event(
diff --git a/backend/app/infrastructure/mappers/event_mapper.py b/backend/app/infrastructure/mappers/event_mapper.py
index 6ea1e790..de616077 100644
--- a/backend/app/infrastructure/mappers/event_mapper.py
+++ b/backend/app/infrastructure/mappers/event_mapper.py
@@ -1,22 +1,15 @@
 from datetime import datetime, timezone
 from typing import Any

+from app.domain.events.event_metadata import EventMetadata
 from app.domain.events.event_models import (
     ArchivedEvent,
     Event,
-    EventBrowseResult,
-    EventDetail,
     EventExportRow,
     EventFields,
     EventFilter,
-    EventListResult,
-    EventProjection,
-    EventReplayInfo,
-    EventStatistics,
     EventSummary,
-    HourlyEventCount,
 )
-from app.infrastructure.kafka.events.metadata import EventMetadata
 from app.schemas_pydantic.admin_events import EventFilter as AdminEventFilter
@@ -31,7 +24,7 @@ def to_mongo_document(event: Event) -> dict[str, Any]:
             EventFields.EVENT_TYPE: event.event_type,
             EventFields.EVENT_VERSION: event.event_version,
             EventFields.TIMESTAMP: event.timestamp,
-            EventFields.METADATA: event.metadata.to_dict(),
+            EventFields.METADATA: event.metadata.to_dict(exclude_none=True),
             EventFields.PAYLOAD: event.payload,
         }
@@ -84,65 +77,10 @@ def from_mongo_document(document: dict[str, Any]) -> Event:
             error=document.get(EventFields.ERROR),
         )

-    @staticmethod
-    def to_dict(event: Event) -> dict[str, Any]:
-        """Convert event to API response dictionary."""
-        result: dict[str, Any] = {
-            "event_id": event.event_id,
-            "event_type": event.event_type,
-            "event_version": event.event_version,
-            "timestamp": event.timestamp,
-            "metadata": event.metadata.to_dict(),
-            "payload": event.payload,
-        }
-
-        if event.aggregate_id is not None:
-            result["aggregate_id"] = event.aggregate_id
-        if event.correlation_id:
-            result["correlation_id"] = event.correlation_id
-        if event.stored_at is not None:
-            result["stored_at"] = event.stored_at
-        if event.ttl_expires_at is not None:
-            result["ttl_expires_at"] = event.ttl_expires_at
-        if event.status is not None:
-            result["status"] = event.status
-        if event.error is not None:
-            result["error"] = event.error
-
-        return result
-
-    @staticmethod
-    def from_dict(data: dict[str, Any]) -> Event:
-        """Create event from API request dictionary."""
-        return Event(
-            event_id=data["event_id"],
-            event_type=data["event_type"],
-            event_version=data.get("event_version", "1.0"),
-            timestamp=data.get("timestamp", datetime.now(timezone.utc)),
-            metadata=EventMetadata.from_dict(data.get("metadata", {})),
-            payload=data.get("payload", {}),
-            aggregate_id=data.get("aggregate_id"),
-            stored_at=data.get("stored_at"),
-            ttl_expires_at=data.get("ttl_expires_at"),
-            status=data.get("status"),
-            error=data.get("error"),
-        )
-

 class EventSummaryMapper:
     """Handles EventSummary serialization."""

-    @staticmethod
-    def to_dict(summary: EventSummary) -> dict[EventFields, Any]:
-        result = {
-            EventFields.EVENT_ID: summary.event_id,
-            EventFields.EVENT_TYPE: summary.event_type,
-            EventFields.TIMESTAMP: summary.timestamp,
-        }
-        if summary.aggregate_id is not None:
-            result[EventFields.AGGREGATE_ID] = summary.aggregate_id
-        return result
-
     @staticmethod
     def from_mongo_document(document: dict[str, Any]) -> EventSummary:
         return EventSummary(
@@ -153,98 +91,6 @@ def from_mongo_document(document: dict[str, Any]) -> EventSummary:
         )


-class EventDetailMapper:
-    """Handles EventDetail serialization."""
-
-    @staticmethod
-    def to_dict(detail: EventDetail) -> dict[str, Any]:
-        event_mapper = EventMapper()
-        summary_mapper = EventSummaryMapper()
-
-        return {
-            "event": event_mapper.to_dict(detail.event),
-            "related_events": [summary_mapper.to_dict(e) for e in detail.related_events],
-            "timeline": [summary_mapper.to_dict(e) for e in detail.timeline],
-        }
-
-
-class EventListResultMapper:
-    """Handles EventListResult serialization."""
-
-    @staticmethod
-    def to_dict(result: EventListResult) -> dict[str, Any]:
-        event_mapper = EventMapper()
-        return {
-            "events": [event_mapper.to_dict(event) for event in result.events],
-            "total": result.total,
-            "skip": result.skip,
-            "limit": result.limit,
-            "has_more": result.has_more,
-        }
-
-
-class EventBrowseResultMapper:
-    """Handles EventBrowseResult serialization."""
-
-    @staticmethod
-    def to_dict(result: EventBrowseResult) -> dict[str, Any]:
-        event_mapper = EventMapper()
-        return {
-            "events": [event_mapper.to_dict(event) for event in result.events],
-            "total": result.total,
-            "skip": result.skip,
-            "limit": result.limit,
-        }
-
-
-class EventStatisticsMapper:
-    """Handles EventStatistics serialization."""
-
-    @staticmethod
-    def to_dict(stats: EventStatistics) -> dict[str, Any]:
-        result: dict[str, Any] = {
-            "total_events": stats.total_events,
-            "events_by_type": stats.events_by_type,
-            "events_by_service": stats.events_by_service,
-            "events_by_hour": [
-                {"hour": h.hour, "count": h.count} if isinstance(h, HourlyEventCount) else h
-                for h in stats.events_by_hour
-            ],
-            "top_users": [{"user_id": u.user_id, "event_count": u.event_count} for u in stats.top_users],
-            "error_rate": stats.error_rate,
-            "avg_processing_time": stats.avg_processing_time,
-        }
-
-        if stats.start_time is not None:
-            result["start_time"] = stats.start_time
-        if stats.end_time is not None:
-            result["end_time"] = stats.end_time
-
-        return result
-
-
-class EventProjectionMapper:
-    """Handles EventProjection serialization."""
-
-    @staticmethod
-    def to_dict(projection: EventProjection) -> dict[str, Any]:
-        result: dict[str, Any] = {
-            "name": projection.name,
-            "pipeline": projection.pipeline,
-            "output_collection": projection.output_collection,
-            "refresh_interval_seconds": projection.refresh_interval_seconds,
-        }
-
-        if projection.description is not None:
-            result["description"] = projection.description
-        if projection.source_events is not None:
-            result["source_events"] = projection.source_events
-        if projection.last_updated is not None:
-            result["last_updated"] = projection.last_updated
-
-        return result
-
-
 class ArchivedEventMapper:
     """Handles ArchivedEvent serialization."""
@@ -364,18 +210,3 @@ def from_admin_pydantic(pflt: AdminEventFilter) -> EventFilter:
             search_text=pflt.search_text,
             text_search=pflt.search_text,
         )
-
-
-class EventReplayInfoMapper:
-    """Handles EventReplayInfo serialization."""
-
-    @staticmethod
-    def to_dict(info: EventReplayInfo) -> dict[str, Any]:
-        event_mapper = EventMapper()
-        return {
-            "events": [event_mapper.to_dict(event) for event in info.events],
-            "event_count": info.event_count,
-            "event_types": info.event_types,
-            "start_time": info.start_time,
-            "end_time": info.end_time,
-        }
diff --git a/backend/app/infrastructure/mappers/execution_api_mapper.py b/backend/app/infrastructure/mappers/execution_api_mapper.py
deleted file mode 100644
index bf775fd0..00000000
--- a/backend/app/infrastructure/mappers/execution_api_mapper.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from __future__ import annotations
-
-from app.domain.execution import DomainExecution
-from app.schemas_pydantic.execution import ExecutionResponse, ExecutionResult
-from app.schemas_pydantic.execution import ResourceUsage as ResourceUsageSchema
-
-
-class ExecutionApiMapper:
-    @staticmethod
-    def to_response(e: DomainExecution) -> ExecutionResponse:
-        return ExecutionResponse(
-            execution_id=e.execution_id,
-            status=e.status,
-        )
-
-    @staticmethod
-    def to_result(e: DomainExecution) -> ExecutionResult:
-        ru = None
-        if e.resource_usage is not None:
-            ru = ResourceUsageSchema(**e.resource_usage.to_dict())
-
-        return ExecutionResult(
-            execution_id=e.execution_id,
-            status=e.status,
-            stdout=e.stdout,
-            stderr=e.stderr,
-            lang=e.lang,
-            lang_version=e.lang_version,
-            resource_usage=ru,
-            exit_code=e.exit_code,
-            error_type=e.error_type,
-        )
diff --git a/backend/app/infrastructure/mappers/notification_api_mapper.py b/backend/app/infrastructure/mappers/notification_api_mapper.py
deleted file mode 100644
index 166ee14c..00000000
--- a/backend/app/infrastructure/mappers/notification_api_mapper.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from __future__ import annotations
-
-from typing import Dict, List
-
-from app.domain.notification import (
-    DomainNotification,
-    DomainNotificationListResult,
-    DomainNotificationSubscription,
-)
-from app.schemas_pydantic.notification import (
-    NotificationListResponse,
-    NotificationResponse,
-    NotificationSubscription,
-    SubscriptionsResponse,
-)
-
-
-class NotificationApiMapper:
-    @staticmethod
-    def to_response(n: DomainNotification) -> NotificationResponse:
-        return NotificationResponse(
-            notification_id=n.notification_id,
-            channel=n.channel,
-            status=n.status,
-            subject=n.subject,
-            body=n.body,
-            action_url=n.action_url,
-            created_at=n.created_at,
-            read_at=n.read_at,
-            severity=n.severity,
-            tags=n.tags,
-        )
-
-    @staticmethod
-    def list_result_to_response(result: DomainNotificationListResult) -> NotificationListResponse:
-        return NotificationListResponse(
-            notifications=[NotificationApiMapper.to_response(x) for x in result.notifications],
-            total=result.total,
-            unread_count=result.unread_count,
-        )
-
-    @staticmethod
-    def subscription_to_pydantic(s: DomainNotificationSubscription) -> NotificationSubscription:
-        return NotificationSubscription(
-            user_id=s.user_id,
-            channel=s.channel,
-            enabled=s.enabled,
-            severities=s.severities,
-            include_tags=s.include_tags,
-            exclude_tags=s.exclude_tags,
-            webhook_url=s.webhook_url,
-            slack_webhook=s.slack_webhook,
-            quiet_hours_enabled=s.quiet_hours_enabled,
-            quiet_hours_start=s.quiet_hours_start,
-            quiet_hours_end=s.quiet_hours_end,
-            timezone=s.timezone,
-            batch_interval_minutes=s.batch_interval_minutes,
-            created_at=s.created_at,
-            updated_at=s.updated_at,
-        )
-
-    @staticmethod
-    def subscriptions_dict_to_response(subs: Dict[str, DomainNotificationSubscription]) -> SubscriptionsResponse:
-        py_subs: List[NotificationSubscription] = [
-            NotificationApiMapper.subscription_to_pydantic(s) for s in subs.values()
-        ]
-        return SubscriptionsResponse(subscriptions=py_subs)
diff --git a/backend/app/infrastructure/mappers/rate_limit_mapper.py b/backend/app/infrastructure/mappers/rate_limit_mapper.py
index bca38d2e..2dcb359f 100644
--- a/backend/app/infrastructure/mappers/rate_limit_mapper.py
+++ b/backend/app/infrastructure/mappers/rate_limit_mapper.py
@@ -1,5 +1,4 @@
 import json
-from dataclasses import asdict
 from datetime import datetime, timezone
 from typing import Any, Dict
@@ -8,7 +7,6 @@
     RateLimitAlgorithm,
     RateLimitConfig,
     RateLimitRule,
-    RateLimitStatus,
     UserRateLimit,
 )
@@ -124,9 +122,3 @@ def model_validate_json(json_str: str | bytes) -> RateLimitConfig:
     def model_dump_json(config: RateLimitConfig) -> str:
         """Pydantic-compatible method for serialization to JSON."""
         return json.dumps(RateLimitConfigMapper.to_dict(config))
-
-
-class RateLimitStatusMapper:
-    @staticmethod
-    def to_dict(status: RateLimitStatus) -> Dict[str, Any]:
-        return asdict(status)
diff --git a/backend/app/infrastructure/mappers/replay_api_mapper.py b/backend/app/infrastructure/mappers/replay_api_mapper.py
index 1b3a842f..37aabe96 100644
--- a/backend/app/infrastructure/mappers/replay_api_mapper.py
+++ b/backend/app/infrastructure/mappers/replay_api_mapper.py
@@ -3,62 +3,9 @@
 from app.domain.enums.replay import ReplayStatus
 from app.domain.replay import ReplayConfig, ReplayFilter, ReplaySessionState
 from app.schemas_pydantic.replay import CleanupResponse, ReplayRequest, ReplayResponse, SessionSummary
-from app.schemas_pydantic.replay_models import (
-    ReplayConfigSchema,
-    ReplayFilterSchema,
-    ReplaySession,
-)


 class ReplayApiMapper:
-    @staticmethod
-    def filter_to_schema(f: ReplayFilter) -> ReplayFilterSchema:
-        return
ReplayFilterSchema( - execution_id=f.execution_id, - event_types=[str(et) for et in f.event_types] if f.event_types else None, - start_time=f.start_time, - end_time=f.end_time, - user_id=f.user_id, - service_name=f.service_name, - custom_query=f.custom_query, - exclude_event_types=[str(et) for et in f.exclude_event_types] if f.exclude_event_types else None, - ) - - @staticmethod - def config_to_schema(c: ReplayConfig) -> ReplayConfigSchema: - return ReplayConfigSchema( - replay_type=c.replay_type, - target=c.target, - filter=ReplayApiMapper.filter_to_schema(c.filter), - speed_multiplier=c.speed_multiplier, - preserve_timestamps=c.preserve_timestamps, - batch_size=c.batch_size, - max_events=c.max_events, - target_topics={str(k): v for k, v in (c.target_topics or {}).items()}, - target_file_path=c.target_file_path, - skip_errors=c.skip_errors, - retry_failed=c.retry_failed, - retry_attempts=c.retry_attempts, - enable_progress_tracking=c.enable_progress_tracking, - ) - - @staticmethod - def session_to_response(state: ReplaySessionState) -> ReplaySession: - return ReplaySession( - session_id=state.session_id, - config=ReplayApiMapper.config_to_schema(state.config), - status=state.status, - total_events=state.total_events, - replayed_events=state.replayed_events, - failed_events=state.failed_events, - skipped_events=state.skipped_events, - created_at=state.created_at, - started_at=state.started_at, - completed_at=state.completed_at, - last_event_at=state.last_event_at, - errors=state.errors, - ) - @staticmethod def session_to_summary(state: ReplaySessionState) -> SessionSummary: duration = None @@ -94,10 +41,19 @@ def request_to_filter(req: ReplayRequest) -> ReplayFilter: end_time=req.end_time if req.end_time else None, user_id=req.user_id, service_name=req.service_name, + custom_query=req.custom_query, + exclude_event_types=req.exclude_event_types, ) @staticmethod def request_to_config(req: ReplayRequest) -> ReplayConfig: + # Convert string keys to EventType for target_topics if provided + target_topics = None + if req.target_topics: + from app.domain.enums.events import EventType + + target_topics = {EventType(k): v for k, v in req.target_topics.items()} + return ReplayConfig( replay_type=req.replay_type, target=req.target, @@ -108,6 +64,10 @@ def request_to_config(req: ReplayRequest) -> ReplayConfig: max_events=req.max_events, skip_errors=req.skip_errors, target_file_path=req.target_file_path, + target_topics=target_topics, + retry_failed=req.retry_failed, + retry_attempts=req.retry_attempts, + enable_progress_tracking=req.enable_progress_tracking, ) @staticmethod diff --git a/backend/app/infrastructure/mappers/replay_mapper.py b/backend/app/infrastructure/mappers/replay_mapper.py index c1ee2efb..cab31899 100644 --- a/backend/app/infrastructure/mappers/replay_mapper.py +++ b/backend/app/infrastructure/mappers/replay_mapper.py @@ -4,7 +4,6 @@ from app.domain.admin import ( ReplayQuery, ReplaySession, - ReplaySessionData, ReplaySessionFields, ReplaySessionStatusDetail, ReplaySessionStatusInfo, @@ -150,30 +149,6 @@ def to_mongodb_query(query: ReplayQuery) -> dict[str, Any]: return mongo_query -class ReplaySessionDataMapper: - @staticmethod - def to_dict(data: ReplaySessionData) -> dict[str, Any]: - result = { - "dry_run": data.dry_run, - "total_events": data.total_events, - "replay_correlation_id": data.replay_correlation_id, - "query": data.query, - } - - if data.dry_run and data.events_preview: - result["events_preview"] = [ - { - "event_id": e.event_id, - "event_type": e.event_type, - 
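The new request_to_config path coerces the request's string-keyed target_topics into EventType keys before building ReplayConfig. A rough sketch of that coercion, assuming a stand-in enum in place of app.domain.enums.events.EventType:

```python
# Illustrative enum standing in for the real EventType.
from enum import Enum


class SketchEventType(str, Enum):
    EXECUTION_REQUESTED = "execution.requested"
    EXECUTION_COMPLETED = "execution.completed"


raw = {"execution.requested": "replay-topic-a", "execution.completed": "replay-topic-b"}
# Coerce string keys to enum members; an unknown key raises ValueError,
# which surfaces a malformed replay request early.
target_topics = {SketchEventType(k): v for k, v in raw.items()}
print(target_topics)
```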
"timestamp": e.timestamp, - "aggregate_id": e.aggregate_id, - } - for e in data.events_preview - ] - - return result - - class ReplayApiMapper: """API-level mapper for converting replay requests to domain queries.""" diff --git a/backend/app/infrastructure/mappers/saga_mapper.py b/backend/app/infrastructure/mappers/saga_mapper.py index ba631abb..26e92bbd 100644 --- a/backend/app/infrastructure/mappers/saga_mapper.py +++ b/backend/app/infrastructure/mappers/saga_mapper.py @@ -1,10 +1,7 @@ -from typing import Any, List +from typing import Any from app.domain.enums.saga import SagaState from app.domain.saga.models import Saga, SagaFilter, SagaInstance -from app.infrastructure.kafka.events.metadata import EventMetadata -from app.infrastructure.kafka.events.saga import SagaCancelledEvent -from app.schemas_pydantic.saga import SagaStatusResponse class SagaMapper: @@ -72,46 +69,6 @@ def from_instance(self, instance: SagaInstance) -> Saga: retry_count=instance.retry_count, ) - def to_dict(self, saga: Saga) -> dict[str, Any]: - """Convert domain model to dictionary for API responses.""" - return { - "saga_id": saga.saga_id, - "saga_name": saga.saga_name, - "execution_id": saga.execution_id, - "state": saga.state.value, - "current_step": saga.current_step, - "completed_steps": saga.completed_steps, - "compensated_steps": saga.compensated_steps, - "error_message": saga.error_message, - "created_at": saga.created_at.isoformat(), - "updated_at": saga.updated_at.isoformat(), - "completed_at": saga.completed_at.isoformat() if saga.completed_at else None, - "retry_count": saga.retry_count, - } - - -class SagaResponseMapper: - """Maps saga domain models to Pydantic response models (API edge only).""" - - def to_response(self, saga: Saga) -> SagaStatusResponse: - return SagaStatusResponse( - saga_id=saga.saga_id, - saga_name=saga.saga_name, - execution_id=saga.execution_id, - state=saga.state, - current_step=saga.current_step, - completed_steps=saga.completed_steps, - compensated_steps=saga.compensated_steps, - error_message=saga.error_message, - created_at=saga.created_at.isoformat(), - updated_at=saga.updated_at.isoformat(), - completed_at=saga.completed_at.isoformat() if saga.completed_at else None, - retry_count=saga.retry_count, - ) - - def list_to_responses(self, sagas: List[Saga]) -> List[SagaStatusResponse]: - return [self.to_response(s) for s in sagas] - class SagaInstanceMapper: """Maps SagaInstance domain <-> Mongo documents.""" @@ -180,37 +137,6 @@ def to_mongo(instance: SagaInstance) -> dict[str, Any]: } -class SagaEventMapper: - """Maps saga domain objects to typed Kafka events.""" - - @staticmethod - def to_cancelled_event( - instance: SagaInstance, - *, - user_id: str | None = None, - service_name: str = "saga-orchestrator", - service_version: str = "1.0.0", - ) -> SagaCancelledEvent: - cancelled_by = user_id or instance.context_data.get("user_id") or "system" - metadata = EventMetadata( - service_name=service_name, - service_version=service_version, - user_id=cancelled_by, - ) - - return SagaCancelledEvent( - saga_id=instance.saga_id, - saga_name=instance.saga_name, - execution_id=instance.execution_id, - reason=instance.error_message or "User requested cancellation", - completed_steps=instance.completed_steps, - compensated_steps=instance.compensated_steps, - cancelled_at=instance.completed_at, - cancelled_by=cancelled_by, - metadata=metadata, - ) - - class SagaFilterMapper: """Maps saga filters to MongoDB queries.""" diff --git 
a/backend/app/infrastructure/mappers/saved_script_api_mapper.py b/backend/app/infrastructure/mappers/saved_script_api_mapper.py deleted file mode 100644 index c759e494..00000000 --- a/backend/app/infrastructure/mappers/saved_script_api_mapper.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -from typing import List - -from app.domain.saved_script import ( - DomainSavedScript, - DomainSavedScriptCreate, - DomainSavedScriptUpdate, -) -from app.schemas_pydantic.saved_script import ( - SavedScriptCreateRequest, - SavedScriptResponse, -) - - -class SavedScriptApiMapper: - @staticmethod - def request_to_create(req: SavedScriptCreateRequest) -> DomainSavedScriptCreate: - return DomainSavedScriptCreate( - name=req.name, - script=req.script, - lang=req.lang, - lang_version=req.lang_version, - description=req.description, - ) - - @staticmethod - def request_to_update(req: SavedScriptCreateRequest) -> DomainSavedScriptUpdate: - return DomainSavedScriptUpdate( - name=req.name, - script=req.script, - lang=req.lang, - lang_version=req.lang_version, - description=req.description, - ) - - @staticmethod - def to_response(s: DomainSavedScript) -> SavedScriptResponse: - return SavedScriptResponse( - script_id=s.script_id, - name=s.name, - script=s.script, - lang=s.lang, - lang_version=s.lang_version, - description=s.description, - created_at=s.created_at, - updated_at=s.updated_at, - ) - - @staticmethod - def list_to_response(items: List[DomainSavedScript]) -> List[SavedScriptResponse]: - return [SavedScriptApiMapper.to_response(i) for i in items] diff --git a/backend/app/infrastructure/mappers/user_settings_api_mapper.py b/backend/app/infrastructure/mappers/user_settings_api_mapper.py deleted file mode 100644 index 95afb3fd..00000000 --- a/backend/app/infrastructure/mappers/user_settings_api_mapper.py +++ /dev/null @@ -1,102 +0,0 @@ -from __future__ import annotations - -from typing import List - -from app.domain.user.settings_models import ( - DomainEditorSettings, - DomainNotificationSettings, - DomainSettingsHistoryEntry, - DomainUserSettings, - DomainUserSettingsUpdate, -) -from app.schemas_pydantic.user_settings import ( - EditorSettings, - NotificationSettings, - SettingsHistoryEntry, - SettingsHistoryResponse, - UserSettings, - UserSettingsUpdate, -) - - -class UserSettingsApiMapper: - @staticmethod - def to_domain_update(upd: UserSettingsUpdate) -> DomainUserSettingsUpdate: - notifications = ( - UserSettingsApiMapper._to_domain_notifications(upd.notifications) if upd.notifications is not None else None - ) - return DomainUserSettingsUpdate( - theme=upd.theme, - timezone=upd.timezone, - date_format=upd.date_format, - time_format=upd.time_format, - notifications=notifications, - editor=UserSettingsApiMapper._to_domain_editor(upd.editor) if upd.editor is not None else None, - custom_settings=upd.custom_settings, - ) - - @staticmethod - def _to_domain_notifications(n: NotificationSettings) -> DomainNotificationSettings: - return DomainNotificationSettings( - execution_completed=n.execution_completed, - execution_failed=n.execution_failed, - system_updates=n.system_updates, - security_alerts=n.security_alerts, - channels=list(n.channels) if n.channels is not None else [], - ) - - @staticmethod - def _to_domain_editor(e: EditorSettings) -> DomainEditorSettings: - return DomainEditorSettings( - theme=e.theme, - font_size=e.font_size, - tab_size=e.tab_size, - use_tabs=e.use_tabs, - word_wrap=e.word_wrap, - show_line_numbers=e.show_line_numbers, - ) - - @staticmethod - def 
to_api_settings(s: DomainUserSettings) -> UserSettings: - return UserSettings( - user_id=s.user_id, - theme=s.theme, - timezone=s.timezone, - date_format=s.date_format, - time_format=s.time_format, - notifications=NotificationSettings( - execution_completed=s.notifications.execution_completed, - execution_failed=s.notifications.execution_failed, - system_updates=s.notifications.system_updates, - security_alerts=s.notifications.security_alerts, - channels=s.notifications.channels, - ), - editor=EditorSettings( - theme=s.editor.theme, - font_size=s.editor.font_size, - tab_size=s.editor.tab_size, - use_tabs=s.editor.use_tabs, - word_wrap=s.editor.word_wrap, - show_line_numbers=s.editor.show_line_numbers, - ), - custom_settings=s.custom_settings, - version=s.version, - created_at=s.created_at, - updated_at=s.updated_at, - ) - - @staticmethod - def history_to_api(items: List[DomainSettingsHistoryEntry]) -> SettingsHistoryResponse: - entries = [ - SettingsHistoryEntry( - timestamp=i.timestamp, - event_type=i.event_type, - field=i.field, - old_value=i.old_value, - new_value=i.new_value, - reason=i.reason, - correlation_id=i.correlation_id, - ) - for i in items - ] - return SettingsHistoryResponse(history=entries, total=len(entries)) diff --git a/backend/app/infrastructure/mappers/user_settings_mapper.py b/backend/app/infrastructure/mappers/user_settings_mapper.py index 79813e69..f3428e6c 100644 --- a/backend/app/infrastructure/mappers/user_settings_mapper.py +++ b/backend/app/infrastructure/mappers/user_settings_mapper.py @@ -21,9 +21,16 @@ def from_snapshot_document(doc: dict[str, Any]) -> DomainUserSettings: editor = doc.get("editor", {}) theme = Theme(doc.get("theme", Theme.AUTO)) - # Coerce channels to NotificationChannel list - channels_raw = notifications.get("channels", []) - channels: list[NotificationChannel] = [NotificationChannel(c) for c in channels_raw] + # Use domain dataclass defaults for fallback values + default_notifications = DomainNotificationSettings() + default_editor = DomainEditorSettings() + + # Coerce channels to NotificationChannel list, using domain default if not present + channels_raw = notifications.get("channels") + if channels_raw is not None: + channels: list[NotificationChannel] = [NotificationChannel(c) for c in channels_raw] + else: + channels = default_notifications.channels return DomainUserSettings( user_id=str(doc.get("user_id")), @@ -32,19 +39,19 @@ def from_snapshot_document(doc: dict[str, Any]) -> DomainUserSettings: date_format=doc.get("date_format", "YYYY-MM-DD"), time_format=doc.get("time_format", "24h"), notifications=DomainNotificationSettings( - execution_completed=notifications.get("execution_completed", True), - execution_failed=notifications.get("execution_failed", True), - system_updates=notifications.get("system_updates", True), - security_alerts=notifications.get("security_alerts", True), + execution_completed=notifications.get("execution_completed", default_notifications.execution_completed), + execution_failed=notifications.get("execution_failed", default_notifications.execution_failed), + system_updates=notifications.get("system_updates", default_notifications.system_updates), + security_alerts=notifications.get("security_alerts", default_notifications.security_alerts), channels=channels, ), editor=DomainEditorSettings( - theme=editor.get("theme", "one-dark"), - font_size=editor.get("font_size", 14), - tab_size=editor.get("tab_size", 4), - use_tabs=editor.get("use_tabs", False), - word_wrap=editor.get("word_wrap", True), - 
show_line_numbers=editor.get("show_line_numbers", True), + theme=editor.get("theme", default_editor.theme), + font_size=editor.get("font_size", default_editor.font_size), + tab_size=editor.get("tab_size", default_editor.tab_size), + use_tabs=editor.get("use_tabs", default_editor.use_tabs), + word_wrap=editor.get("word_wrap", default_editor.word_wrap), + show_line_numbers=editor.get("show_line_numbers", default_editor.show_line_numbers), ), custom_settings=doc.get("custom_settings", {}), version=doc.get("version", 1), diff --git a/backend/app/schemas_pydantic/admin_events.py b/backend/app/schemas_pydantic/admin_events.py index 4212a1ce..2b679a91 100644 --- a/backend/app/schemas_pydantic/admin_events.py +++ b/backend/app/schemas_pydantic/admin_events.py @@ -1,9 +1,10 @@ from datetime import datetime from typing import Any, Dict, List -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from app.domain.enums.events import EventType +from app.schemas_pydantic.events import HourlyEventCountSchema class EventFilter(BaseModel): @@ -72,6 +73,8 @@ class EventReplayResponse(BaseModel): class EventReplayStatusResponse(BaseModel): """Response model for replay status""" + model_config = ConfigDict(from_attributes=True) + session_id: str status: str total_events: int @@ -95,12 +98,23 @@ class EventDeleteResponse(BaseModel): event_id: str +class UserEventCountSchema(BaseModel): + """User event count schema""" + + model_config = ConfigDict(from_attributes=True) + + user_id: str + event_count: int + + class EventStatsResponse(BaseModel): """Response model for event statistics""" + model_config = ConfigDict(from_attributes=True) + total_events: int events_by_type: Dict[str, int] - events_by_hour: List[Dict[str, Any]] - top_users: List[Dict[str, Any]] + events_by_hour: List[HourlyEventCountSchema] + top_users: List[UserEventCountSchema] error_rate: float avg_processing_time: float diff --git a/backend/app/schemas_pydantic/dlq.py b/backend/app/schemas_pydantic/dlq.py index a52a37ae..a9815820 100644 --- a/backend/app/schemas_pydantic/dlq.py +++ b/backend/app/schemas_pydantic/dlq.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import Any -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from app.dlq import DLQMessageStatus, RetryStrategy from app.domain.enums.events import EventType @@ -10,6 +10,8 @@ class DLQStats(BaseModel): """Statistics for the Dead Letter Queue.""" + model_config = ConfigDict(from_attributes=True) + by_status: dict[str, int] by_topic: list[dict[str, Any]] by_event_type: list[dict[str, Any]] @@ -20,6 +22,8 @@ class DLQStats(BaseModel): class DLQMessageResponse(BaseModel): """Response model for a DLQ message.""" + model_config = ConfigDict(from_attributes=True) + event_id: str event_type: EventType original_topic: str @@ -28,7 +32,11 @@ class DLQMessageResponse(BaseModel): failed_at: datetime status: DLQMessageStatus age_seconds: float - details: dict[str, Any] + producer_id: str + dlq_offset: int | None = None + dlq_partition: int | None = None + last_error: str | None = None + next_retry_at: datetime | None = None class RetryPolicyRequest(BaseModel): @@ -51,6 +59,8 @@ class ManualRetryRequest(BaseModel): class DLQMessagesResponse(BaseModel): """Response model for listing DLQ messages.""" + model_config = ConfigDict(from_attributes=True) + messages: list[DLQMessageResponse] total: int offset: int @@ -60,6 +70,8 @@ class DLQMessagesResponse(BaseModel): class DLQBatchRetryResponse(BaseModel): """Response model for 
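from_snapshot_document now takes its fallback values from freshly constructed domain dataclasses instead of repeating literal defaults. A small sketch of the idea with illustrative names (not the real DomainEditorSettings):

```python
# Hedged sketch: instantiate the dataclass once and read fallbacks from it,
# so the defaults live in a single place.
from dataclasses import dataclass
from typing import Any


@dataclass
class SketchEditorSettings:
    theme: str = "one-dark"
    font_size: int = 14
    tab_size: int = 4


def editor_from_doc(doc: dict[str, Any]) -> SketchEditorSettings:
    defaults = SketchEditorSettings()
    return SketchEditorSettings(
        theme=doc.get("theme", defaults.theme),
        font_size=doc.get("font_size", defaults.font_size),
        tab_size=doc.get("tab_size", defaults.tab_size),
    )


print(editor_from_doc({"font_size": 16}))  # missing keys fall back to dataclass defaults
```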
batch retry operation.""" + model_config = ConfigDict(from_attributes=True) + total: int successful: int failed: int @@ -69,6 +81,8 @@ class DLQBatchRetryResponse(BaseModel): class DLQTopicSummaryResponse(BaseModel): """Response model for topic summary.""" + model_config = ConfigDict(from_attributes=True) + topic: str total_messages: int status_breakdown: dict[str, int] @@ -81,6 +95,8 @@ class DLQTopicSummaryResponse(BaseModel): class DLQMessageDetail(BaseModel): """Detailed DLQ message response.""" + model_config = ConfigDict(from_attributes=True) + event_id: str event: dict[str, Any] # BaseEvent as dict event_type: EventType diff --git a/backend/app/schemas_pydantic/events.py b/backend/app/schemas_pydantic/events.py index daa6c8bf..346b91a7 100644 --- a/backend/app/schemas_pydantic/events.py +++ b/backend/app/schemas_pydantic/events.py @@ -12,10 +12,34 @@ from app.domain.enums.common import SortOrder from app.domain.enums.events import EventType -from app.infrastructure.kafka.events.metadata import EventMetadata + + +class HourlyEventCountSchema(BaseModel): + """Hourly event count for statistics.""" + + model_config = ConfigDict(from_attributes=True) + + hour: str + count: int + + +class EventMetadataResponse(BaseModel): + """Pydantic schema for event metadata in API responses.""" + + model_config = ConfigDict(from_attributes=True) + + service_name: str + service_version: str + correlation_id: str + user_id: str | None = None + ip_address: str | None = None + user_agent: str | None = None + environment: str = "production" class EventResponse(BaseModel): + model_config = ConfigDict(from_attributes=True) + event_id: str event_type: EventType event_version: str @@ -23,12 +47,14 @@ class EventResponse(BaseModel): aggregate_id: str | None = None correlation_id: str | None = None causation_id: str | None = None - metadata: Dict[str, Any] + metadata: EventMetadataResponse payload: Dict[str, Any] stored_at: datetime | None = None class EventListResponse(BaseModel): + model_config = ConfigDict(from_attributes=True) + events: List[EventResponse] total: int limit: int @@ -89,7 +115,7 @@ class EventBase(BaseModel): aggregate_id: str | None = None correlation_id: str | None = None causation_id: str | None = None # ID of the event that caused this event - metadata: EventMetadata + metadata: EventMetadataResponse payload: Dict[str, Any] model_config = ConfigDict( @@ -188,11 +214,12 @@ class EventStatistics(BaseModel): total_events: int events_by_type: Dict[str, int] events_by_service: Dict[str, int] - events_by_hour: List[Dict[str, Any]] + events_by_hour: List[HourlyEventCountSchema] start_time: datetime | None = None end_time: datetime | None = None model_config = ConfigDict( + from_attributes=True, json_schema_extra={ "example": { "total_events": 1543, @@ -207,7 +234,7 @@ class EventStatistics(BaseModel): {"hour": "2024-01-20 11:00", "count": 92}, ], } - } + }, ) diff --git a/backend/app/schemas_pydantic/replay.py b/backend/app/schemas_pydantic/replay.py index 73bd7556..9949884e 100644 --- a/backend/app/schemas_pydantic/replay.py +++ b/backend/app/schemas_pydantic/replay.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List +from typing import Any, Dict, List from pydantic import BaseModel, Field @@ -20,6 +20,8 @@ class ReplayRequest(BaseModel): end_time: datetime | None = None user_id: str | None = None service_name: str | None = None + custom_query: Dict[str, Any] | None = None + exclude_event_types: List[EventType] | None = None # Replay configuration speed_multiplier: float = 
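The schema changes above lean on ConfigDict(from_attributes=True) so domain objects validate straight into response models, including nested hourly counts. A self-contained sketch with stand-in types (not the project's EventStatistics or HourlyEventCount):

```python
# Minimal sketch of from_attributes validation over plain domain objects,
# including a nested list, using Pydantic v2.
from dataclasses import dataclass

from pydantic import BaseModel, ConfigDict


@dataclass
class HourlyCount:
    hour: str
    count: int


@dataclass
class DomainStats:
    total_events: int
    events_by_hour: list[HourlyCount]


class HourlyCountSchema(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    hour: str
    count: int


class StatsResponse(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    total_events: int
    events_by_hour: list[HourlyCountSchema]


domain = DomainStats(total_events=2, events_by_hour=[HourlyCount("2024-01-20 10:00", 2)])
print(StatsResponse.model_validate(domain).model_dump())
```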
Field(default=1.0, ge=0.1, le=100.0) @@ -28,6 +30,10 @@ class ReplayRequest(BaseModel): max_events: int | None = Field(default=None, ge=1) skip_errors: bool = True target_file_path: str | None = None + target_topics: Dict[str, str] | None = None + retry_failed: bool = False + retry_attempts: int = Field(default=3, ge=1, le=10) + enable_progress_tracking: bool = True class ReplayResponse(BaseModel): diff --git a/backend/app/schemas_pydantic/replay_models.py b/backend/app/schemas_pydantic/replay_models.py index 34ad991d..d5fa4c2d 100644 --- a/backend/app/schemas_pydantic/replay_models.py +++ b/backend/app/schemas_pydantic/replay_models.py @@ -2,7 +2,7 @@ from typing import Any, Dict, List from uuid import uuid4 -from pydantic import BaseModel, Field, field_validator, model_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType from app.domain.replay import ReplayConfig as DomainReplayConfig @@ -10,6 +10,8 @@ class ReplayFilterSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + execution_id: str | None = None event_types: List[str] | None = None start_time: datetime | None = None @@ -34,6 +36,8 @@ def from_domain(cls, f: DomainReplayFilter) -> "ReplayFilterSchema": class ReplayConfigSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + replay_type: ReplayType target: ReplayTarget = ReplayTarget.KAFKA filter: ReplayFilterSchema @@ -77,6 +81,8 @@ def _from_domain(cls, data: Any) -> Any: # noqa: ANN001 class ReplaySession(BaseModel): + model_config = ConfigDict(from_attributes=True) + session_id: str = Field(default_factory=lambda: str(uuid4())) config: ReplayConfigSchema status: ReplayStatus = ReplayStatus.CREATED diff --git a/backend/app/schemas_pydantic/saga.py b/backend/app/schemas_pydantic/saga.py index ebf4abca..217b469d 100644 --- a/backend/app/schemas_pydantic/saga.py +++ b/backend/app/schemas_pydantic/saga.py @@ -44,6 +44,9 @@ class SagaListResponse(BaseModel): sagas: list[SagaStatusResponse] total: int + skip: int + limit: int + has_more: bool class SagaCancellationResponse(BaseModel): diff --git a/backend/app/schemas_pydantic/user_settings.py b/backend/app/schemas_pydantic/user_settings.py index 5fcc30d5..77d3d2fd 100644 --- a/backend/app/schemas_pydantic/user_settings.py +++ b/backend/app/schemas_pydantic/user_settings.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone from typing import Any, Dict, List -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from app.domain.enums.common import Theme from app.domain.enums.events import EventType @@ -11,6 +11,8 @@ class NotificationSettings(BaseModel): """User notification preferences""" + model_config = ConfigDict(from_attributes=True) + execution_completed: bool = True execution_failed: bool = True system_updates: bool = True @@ -21,6 +23,8 @@ class NotificationSettings(BaseModel): class EditorSettings(BaseModel): """Code editor preferences""" + model_config = ConfigDict(from_attributes=True) + theme: str = "auto" font_size: int = 14 tab_size: int = 4 @@ -46,6 +50,8 @@ def validate_tab_size(cls, v: int) -> int: class UserSettings(BaseModel): """Complete user settings model""" + model_config = ConfigDict(from_attributes=True) + user_id: str theme: Theme = Theme.AUTO timezone: str = "UTC" @@ -62,6 +68,8 @@ class UserSettings(BaseModel): class UserSettingsUpdate(BaseModel): """Partial update model for user 
settings""" + model_config = ConfigDict(from_attributes=True) + theme: Theme | None = None timezone: str | None = None date_format: str | None = None @@ -90,6 +98,8 @@ class ThemeUpdateRequest(BaseModel): class SettingsHistoryEntry(BaseModel): """Single entry in settings history""" + model_config = ConfigDict(from_attributes=True) + timestamp: datetime event_type: EventType field: str @@ -100,10 +110,12 @@ class SettingsHistoryEntry(BaseModel): class SettingsHistoryResponse(BaseModel): - """Response model for settings history""" + """Response model for settings history (limited snapshot of recent changes)""" + + model_config = ConfigDict(from_attributes=True) history: List[SettingsHistoryEntry] - total: int + limit: int class RestoreSettingsRequest(BaseModel): diff --git a/backend/app/services/admin/admin_events_service.py b/backend/app/services/admin/admin_events_service.py index fe70a45f..2a9ed6e8 100644 --- a/backend/app/services/admin/admin_events_service.py +++ b/backend/app/services/admin/admin_events_service.py @@ -1,6 +1,6 @@ import csv import json -from dataclasses import dataclass +from dataclasses import asdict, dataclass from datetime import datetime, timezone from io import StringIO from typing import Any, Dict, List @@ -21,7 +21,7 @@ EventStatistics, ) from app.domain.replay import ReplayConfig, ReplayFilter -from app.infrastructure.mappers import EventExportRowMapper, EventMapper +from app.infrastructure.mappers import EventExportRowMapper from app.services.replay_service import ReplayService @@ -220,13 +220,12 @@ async def export_events_json_content(self, *, event_filter: EventFilter, limit: result = await self._repo.browse_events( event_filter=event_filter, skip=0, limit=limit, sort_by="timestamp", sort_order=-1 ) - event_mapper = EventMapper() events_data: list[dict[str, Any]] = [] for event in result.events: - event_dict = event_mapper.to_dict(event) - for field in ["timestamp", "created_at", "updated_at", "stored_at", "ttl_expires_at"]: - if field in event_dict and isinstance(event_dict[field], datetime): - event_dict[field] = event_dict[field].isoformat() + event_dict = asdict(event) + for fld in ["timestamp", "created_at", "updated_at", "stored_at", "ttl_expires_at"]: + if fld in event_dict and isinstance(event_dict[fld], datetime): + event_dict[fld] = event_dict[fld].isoformat() events_data.append(event_dict) export_data: dict[str, Any] = { diff --git a/backend/app/services/coordinator/coordinator.py b/backend/app/services/coordinator/coordinator.py index a6d562a8..b7843425 100644 --- a/backend/app/services/coordinator/coordinator.py +++ b/backend/app/services/coordinator/coordinator.py @@ -33,7 +33,7 @@ ExecutionFailedEvent, ExecutionRequestedEvent, ) -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.infrastructure.kafka.events.saga import CreatePodCommandEvent from app.services.coordinator.queue_manager import QueueManager, QueuePriority from app.services.coordinator.resource_manager import ResourceAllocation, ResourceManager diff --git a/backend/app/services/event_service.py b/backend/app/services/event_service.py index 28e0c61e..77211181 100644 --- a/backend/app/services/event_service.py +++ b/backend/app/services/event_service.py @@ -34,13 +34,21 @@ async def get_execution_events( user_id: str, user_role: UserRole, include_system_events: bool = False, - ) -> list[Event] | None: - events = await 
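export_events_json_content now builds export rows with dataclasses.asdict and converts top-level datetimes to ISO strings before JSON encoding. A minimal sketch of that step, assuming a simplified Event shape:

```python
# Hedged sketch of asdict-based export: top-level datetime fields are
# converted to ISO strings so json.dumps can handle the row.
import json
from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
from typing import Any


@dataclass
class SketchEvent:
    event_id: str
    event_type: str
    timestamp: datetime
    payload: dict[str, Any] = field(default_factory=dict)


event = SketchEvent("e1", "user.logged_in", datetime.now(timezone.utc))
event_dict = asdict(event)
for fld in ("timestamp", "stored_at", "ttl_expires_at"):
    if fld in event_dict and isinstance(event_dict[fld], datetime):
        event_dict[fld] = event_dict[fld].isoformat()
print(json.dumps(event_dict))
```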
self.repository.get_events_by_aggregate(aggregate_id=execution_id, limit=1000) - if not events: - return [] + limit: int = 1000, + skip: int = 0, + ) -> EventListResult | None: + # Filter system events at DB level for accurate pagination + result = await self.repository.get_execution_events( + execution_id=execution_id, + limit=limit, + skip=skip, + exclude_system_events=not include_system_events, + ) + if not result.events: + return EventListResult(events=[], total=0, skip=skip, limit=limit, has_more=False) owner = None - for e in events: + for e in result.events: if e.metadata and e.metadata.user_id: owner = e.metadata.user_id break @@ -48,10 +56,7 @@ async def get_execution_events( if owner and owner != user_id and user_role != UserRole.ADMIN: return None - if not include_system_events: - events = [e for e in events if not (e.metadata and e.metadata.service_name.startswith("system-"))] - - return events + return result async def get_user_events_paginated( self, @@ -118,11 +123,19 @@ async def get_events_by_correlation( user_role: UserRole, include_all_users: bool = False, limit: int = 100, - ) -> list[Event]: - events = await self.repository.get_events_by_correlation(correlation_id=correlation_id, limit=limit) + skip: int = 0, + ) -> EventListResult: + result = await self.repository.get_events_by_correlation(correlation_id=correlation_id, limit=limit, skip=skip) if not include_all_users or user_role != UserRole.ADMIN: - events = [e for e in events if (e.metadata and e.metadata.user_id == user_id)] - return events + filtered = [e for e in result.events if (e.metadata and e.metadata.user_id == user_id)] + return EventListResult( + events=filtered, + total=result.total, + skip=skip, + limit=limit, + has_more=result.has_more, + ) + return result async def get_event_statistics( self, diff --git a/backend/app/services/execution_service.py b/backend/app/services/execution_service.py index 9b2cebfc..33001a73 100644 --- a/backend/app/services/execution_service.py +++ b/backend/app/services/execution_service.py @@ -23,7 +23,7 @@ ExecutionCancelledEvent, ExecutionRequestedEvent, ) -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.runtime_registry import RUNTIME_REGISTRY from app.settings import Settings diff --git a/backend/app/services/kafka_event_service.py b/backend/app/services/kafka_event_service.py index 1f32029a..166f634f 100644 --- a/backend/app/services/kafka_event_service.py +++ b/backend/app/services/kafka_event_service.py @@ -12,8 +12,9 @@ from app.db.repositories.event_repository import EventRepository from app.domain.enums.events import EventType from app.domain.events import Event +from app.domain.events import EventMetadata as DomainEventMetadata from app.events.core import UnifiedProducer -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.mappings import get_event_class_for_type from app.settings import get_settings @@ -33,7 +34,7 @@ async def publish_event( payload: Dict[str, Any], aggregate_id: str | None, correlation_id: str | None = None, - metadata: EventMetadata | None = None, + metadata: AvroEventMetadata | None = None, ) -> str: """ Publish an event to Kafka and store an audit copy via the repository @@ -58,24 +59,27 @@ async def publish_event( if not correlation_id: correlation_id = CorrelationContext.get_correlation_id() - # Create or 
enrich event metadata - event_metadata = metadata or EventMetadata( + # Create or enrich event metadata (Avro for Kafka) + avro_metadata = metadata or AvroEventMetadata( service_name=self.settings.SERVICE_NAME, service_version=self.settings.SERVICE_VERSION, ) - event_metadata = event_metadata.with_correlation(correlation_id or str(uuid4())) + avro_metadata = avro_metadata.with_correlation(correlation_id or str(uuid4())) # Create event event_id = str(uuid4()) timestamp = datetime.now(timezone.utc) - # Create domain event (using the unified EventMetadata) + + # Convert to domain metadata for storage + domain_metadata = DomainEventMetadata.from_dict(avro_metadata.to_dict()) + event = Event( event_id=event_id, event_type=event_type, event_version="1.0", timestamp=timestamp, aggregate_id=aggregate_id, - metadata=event_metadata, + metadata=domain_metadata, payload=payload, ) _ = await self.event_repository.store_event(event) @@ -92,7 +96,7 @@ async def publish_event( "event_version": "1.0", "timestamp": timestamp, "aggregate_id": aggregate_id, - "metadata": event_metadata, + "metadata": avro_metadata, **payload, # Include event-specific payload fields } @@ -103,7 +107,7 @@ async def publish_event( headers: Dict[str, str] = { "event_type": event_type, "correlation_id": event.correlation_id or "", - "service": event_metadata.service_name, + "service": avro_metadata.service_name, } # Add trace context @@ -140,7 +144,7 @@ async def publish_execution_event( event_type: EventType, execution_id: str, status: str, - metadata: EventMetadata | None = None, + metadata: AvroEventMetadata | None = None, error_message: str | None = None, ) -> str: """Publish execution-related event using provided metadata (no framework coupling).""" @@ -183,7 +187,7 @@ async def publish_pod_event( execution_id: str, namespace: str = "integr8scode", status: str | None = None, - metadata: EventMetadata | None = None, + metadata: AvroEventMetadata | None = None, ) -> str: """Publish pod-related event""" payload = {"pod_name": pod_name, "execution_id": execution_id, "namespace": namespace} diff --git a/backend/app/services/pod_monitor/event_mapper.py b/backend/app/services/pod_monitor/event_mapper.py index 56eb0966..fc16bc1f 100644 --- a/backend/app/services/pod_monitor/event_mapper.py +++ b/backend/app/services/pod_monitor/event_mapper.py @@ -15,7 +15,7 @@ ExecutionFailedEvent, ExecutionTimeoutEvent, ) -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.infrastructure.kafka.events.pod import ( PodRunningEvent, PodScheduledEvent, diff --git a/backend/app/services/result_processor/processor.py b/backend/app/services/result_processor/processor.py index 1755fa3d..1e852bf0 100644 --- a/backend/app/services/result_processor/processor.py +++ b/backend/app/services/result_processor/processor.py @@ -23,7 +23,7 @@ ExecutionFailedEvent, ExecutionTimeoutEvent, ) -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.infrastructure.kafka.events.system import ( ResultFailedEvent, ResultStoredEvent, diff --git a/backend/app/services/saga/execution_saga.py b/backend/app/services/saga/execution_saga.py index 479d7c99..616915f2 100644 --- a/backend/app/services/saga/execution_saga.py +++ b/backend/app/services/saga/execution_saga.py @@ -5,7 +5,7 @@ from app.domain.enums.events import EventType from app.events.core import 
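publish_event now keeps AvroEventMetadata for the Kafka payload and converts it to domain metadata through a to_dict()/from_dict() hand-off before storing the audit copy. A hedged sketch of that conversion with stand-in classes (not the real Avro or domain metadata types):

```python
# Illustrative round trip: wire metadata -> dict -> domain metadata.
from dataclasses import asdict, dataclass
from typing import Any


@dataclass
class SketchAvroMetadata:
    service_name: str
    service_version: str
    correlation_id: str | None = None

    def to_dict(self) -> dict[str, Any]:
        return asdict(self)


@dataclass
class SketchDomainMetadata:
    service_name: str
    service_version: str
    correlation_id: str | None = None

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "SketchDomainMetadata":
        keys = ("service_name", "service_version", "correlation_id")
        return cls(**{k: data.get(k) for k in keys})


avro = SketchAvroMetadata(service_name="api", service_version="1.0", correlation_id="c-123")
domain = SketchDomainMetadata.from_dict(avro.to_dict())
print(domain)
```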
UnifiedProducer from app.infrastructure.kafka.events.execution import ExecutionRequestedEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.infrastructure.kafka.events.saga import CreatePodCommandEvent, DeletePodCommandEvent from .base_saga import BaseSaga diff --git a/backend/app/services/saga/saga_orchestrator.py b/backend/app/services/saga/saga_orchestrator.py index c72a0624..89f8c239 100644 --- a/backend/app/services/saga/saga_orchestrator.py +++ b/backend/app/services/saga/saga_orchestrator.py @@ -15,7 +15,7 @@ from app.events.core import ConsumerConfig, EventDispatcher, UnifiedConsumer, UnifiedProducer from app.events.event_store import EventStore from app.infrastructure.kafka.events.base import BaseEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata as EventMetadata from app.infrastructure.kafka.events.saga import SagaCancelledEvent from app.infrastructure.kafka.mappings import get_topic_for_event from app.services.idempotency import IdempotentConsumerWrapper @@ -390,8 +390,8 @@ async def get_saga_status(self, saga_id: str) -> Saga | None: async def get_execution_sagas(self, execution_id: str) -> list[Saga]: """Get all sagas for an execution, sorted by created_at descending (newest first)""" - sagas = await self._repo.get_sagas_by_execution(execution_id) - return sagas + result = await self._repo.get_sagas_by_execution(execution_id) + return result.sagas async def cancel_saga(self, saga_id: str) -> bool: """Cancel a running saga and trigger compensation. diff --git a/backend/app/services/saga/saga_service.py b/backend/app/services/saga/saga_service.py index fd2f1594..27617bb0 100644 --- a/backend/app/services/saga/saga_service.py +++ b/backend/app/services/saga/saga_service.py @@ -63,7 +63,9 @@ async def get_saga_with_access_check(self, saga_id: str, user: User) -> Saga: return saga - async def get_execution_sagas(self, execution_id: str, user: User, state: SagaState | None = None) -> list[Saga]: + async def get_execution_sagas( + self, execution_id: str, user: User, state: SagaState | None = None, limit: int = 100, skip: int = 0 + ) -> SagaListResult: """Get sagas for an execution with access control.""" # Check access to execution if not await self.check_execution_access(execution_id, user): @@ -72,7 +74,7 @@ async def get_execution_sagas(self, execution_id: str, user: User, state: SagaSt ) raise SagaAccessDeniedError(f"Access denied - no access to execution {execution_id}") - return await self.saga_repo.get_sagas_by_execution(execution_id, state) + return await self.saga_repo.get_sagas_by_execution(execution_id, state, limit=limit, skip=skip) async def list_user_sagas( self, user: User, state: SagaState | None = None, limit: int = 100, skip: int = 0 diff --git a/backend/tests/helpers/events.py b/backend/tests/helpers/events.py index 4103b36f..63b6fc15 100644 --- a/backend/tests/helpers/events.py +++ b/backend/tests/helpers/events.py @@ -2,7 +2,7 @@ from typing import Iterable from app.infrastructure.kafka.events.execution import ExecutionRequestedEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata def make_execution_requested_event( @@ -31,7 +31,7 @@ def make_execution_requested_event( if execution_id is None: execution_id = f"exec-{uuid.uuid4().hex[:8]}" - metadata = 
EventMetadata(service_name=service_name, service_version=service_version, user_id=user_id) + metadata = AvroEventMetadata(service_name=service_name, service_version=service_version, user_id=user_id) return ExecutionRequestedEvent( execution_id=execution_id, script=script, diff --git a/backend/tests/integration/events/test_dlq_handler.py b/backend/tests/integration/events/test_dlq_handler.py index 997718f5..b30b6c6a 100644 --- a/backend/tests/integration/events/test_dlq_handler.py +++ b/backend/tests/integration/events/test_dlq_handler.py @@ -2,7 +2,7 @@ from app.events.core import create_dlq_error_handler, create_immediate_dlq_handler from app.events.core import UnifiedProducer -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.saga import SagaStartedEvent pytestmark = [pytest.mark.integration, pytest.mark.kafka] @@ -19,7 +19,7 @@ async def _record_send_to_dlq(original_event, original_topic, error, retry_count monkeypatch.setattr(p, "send_to_dlq", _record_send_to_dlq) h = create_dlq_error_handler(p, original_topic="t", max_retries=2) e = SagaStartedEvent(saga_id="s", saga_name="n", execution_id="x", initial_event_id="i", - metadata=EventMetadata(service_name="a", service_version="1")) + metadata=AvroEventMetadata(service_name="a", service_version="1")) # Call 1 and 2 should not send to DLQ await h(RuntimeError("boom"), e) await h(RuntimeError("boom"), e) @@ -41,6 +41,6 @@ async def _record_send_to_dlq(original_event, original_topic, error, retry_count monkeypatch.setattr(p, "send_to_dlq", _record_send_to_dlq) h = create_immediate_dlq_handler(p, original_topic="t") e = SagaStartedEvent(saga_id="s2", saga_name="n", execution_id="x", initial_event_id="i", - metadata=EventMetadata(service_name="a", service_version="1")) + metadata=AvroEventMetadata(service_name="a", service_version="1")) await h(RuntimeError("x"), e) assert calls and calls[0][3] == 0 diff --git a/backend/tests/integration/events/test_event_store.py b/backend/tests/integration/events/test_event_store.py index b422268c..6d00e0ad 100644 --- a/backend/tests/integration/events/test_event_store.py +++ b/backend/tests/integration/events/test_event_store.py @@ -4,7 +4,7 @@ from app.events.event_store import EventStore from app.events.schema.schema_registry import SchemaRegistryManager -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.pod import PodCreatedEvent from app.infrastructure.kafka.events.user import UserLoggedInEvent from motor.motor_asyncio import AsyncIOMotorDatabase @@ -27,7 +27,7 @@ async def test_store_and_query_events(event_store: EventStore) -> None: execution_id="x1", pod_name="pod1", namespace="ns", - metadata=EventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="cid"), + metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="cid"), ) assert await event_store.store_event(ev1) is True @@ -35,7 +35,7 @@ async def test_store_and_query_events(event_store: EventStore) -> None: execution_id="x2", pod_name="pod2", namespace="ns", - metadata=EventMetadata(service_name="svc", service_version="1", user_id="u1"), + metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1"), ) res = await event_store.store_batch([ev1, ev2]) assert res["total"] == 2 and res["stored"] >= 1 @@ -55,7 +55,7 
@@ async def test_store_and_query_events(event_store: EventStore) -> None: @pytest.mark.asyncio async def test_replay_events(event_store: EventStore) -> None: ev = UserLoggedInEvent(user_id="u1", login_method="password", - metadata=EventMetadata(service_name="svc", service_version="1")) + metadata=AvroEventMetadata(service_name="svc", service_version="1")) await event_store.store_event(ev) called = {"n": 0} diff --git a/backend/tests/integration/events/test_event_store_consumer.py b/backend/tests/integration/events/test_event_store_consumer.py index 5ce24e3e..778f4da1 100644 --- a/backend/tests/integration/events/test_event_store_consumer.py +++ b/backend/tests/integration/events/test_event_store_consumer.py @@ -11,7 +11,7 @@ from app.events.event_store import EventStore from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas from app.infrastructure.kafka.events.user import UserLoggedInEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata pytestmark = [pytest.mark.integration, pytest.mark.kafka, pytest.mark.mongodb] @@ -32,7 +32,7 @@ async def test_event_store_consumer_stores_events(scope) -> None: # type: ignor ev = UserLoggedInEvent( user_id=f"u-{uuid.uuid4().hex[:6]}", login_method="password", - metadata=EventMetadata(service_name="tests", service_version="1.0.0"), + metadata=AvroEventMetadata(service_name="tests", service_version="1.0.0"), ) # Create a tuned consumer (fast batch timeout) limited to user-events diff --git a/backend/tests/integration/events/test_schema_registry_real.py b/backend/tests/integration/events/test_schema_registry_real.py index c60491ff..962910d2 100644 --- a/backend/tests/integration/events/test_schema_registry_real.py +++ b/backend/tests/integration/events/test_schema_registry_real.py @@ -1,7 +1,7 @@ import pytest from app.events.schema.schema_registry import SchemaRegistryManager -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.pod import PodCreatedEvent pytestmark = [pytest.mark.integration, pytest.mark.kafka] @@ -14,7 +14,7 @@ def test_serialize_and_deserialize_event_real_registry() -> None: execution_id="e1", pod_name="p", namespace="n", - metadata=EventMetadata(service_name="s", service_version="1"), + metadata=AvroEventMetadata(service_name="s", service_version="1"), ) data = m.serialize_event(ev) obj = m.deserialize_event(data, topic=str(ev.topic)) diff --git a/backend/tests/integration/k8s/test_k8s_worker_create_pod.py b/backend/tests/integration/k8s/test_k8s_worker_create_pod.py index 36c01f97..1722ea2d 100644 --- a/backend/tests/integration/k8s/test_k8s_worker_create_pod.py +++ b/backend/tests/integration/k8s/test_k8s_worker_create_pod.py @@ -4,7 +4,7 @@ import pytest from kubernetes.client.rest import ApiException -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.saga import CreatePodCommandEvent from app.services.k8s_worker.config import K8sWorkerConfig from app.services.k8s_worker.worker import KubernetesWorker @@ -63,7 +63,7 @@ async def test_worker_creates_configmap_and_pod(scope, monkeypatch): # type: ig cpu_request="50m", memory_request="64Mi", priority=5, - metadata=EventMetadata(service_name="tests", service_version="1", user_id="u1"), + 
metadata=AvroEventMetadata(service_name="tests", service_version="1", user_id="u1"), ) # Build and create ConfigMap + Pod diff --git a/backend/tests/integration/result_processor/test_result_processor.py b/backend/tests/integration/result_processor/test_result_processor.py index 927c62b5..af16b600 100644 --- a/backend/tests/integration/result_processor/test_result_processor.py +++ b/backend/tests/integration/result_processor/test_result_processor.py @@ -15,7 +15,7 @@ from app.events.core.types import ConsumerConfig from app.events.schema.schema_registry import SchemaRegistryManager, initialize_event_schemas from app.infrastructure.kafka.events.execution import ExecutionCompletedEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.services.idempotency import IdempotencyManager from app.services.result_processor.processor import ResultProcessor from app.settings import get_settings @@ -88,7 +88,7 @@ async def _stored(_event) -> None: # noqa: ANN001 stdout="hello", stderr="", resource_usage=usage, - metadata=EventMetadata(service_name="tests", service_version="1.0.0"), + metadata=AvroEventMetadata(service_name="tests", service_version="1.0.0"), ) await producer.produce(evt, key=execution_id) diff --git a/backend/tests/integration/services/events/test_event_service_integration.py b/backend/tests/integration/services/events/test_event_service_integration.py index 9d66de5d..0f7257dc 100644 --- a/backend/tests/integration/services/events/test_event_service_integration.py +++ b/backend/tests/integration/services/events/test_event_service_integration.py @@ -6,7 +6,7 @@ from app.domain.events.event_models import EventFields, Event, EventFilter from app.domain.enums.common import SortOrder from app.domain.enums.user import UserRole -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.domain.enums.events import EventType from app.services.event_service import EventService @@ -20,8 +20,8 @@ async def test_event_service_access_and_queries(scope) -> None: # type: ignore[ now = datetime.now(timezone.utc) # Seed some events (domain Event, not infra BaseEvent) - md1 = EventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="c1") - md2 = EventMetadata(service_name="svc", service_version="1", user_id="u2", correlation_id="c1") + md1 = AvroEventMetadata(service_name="svc", service_version="1", user_id="u1", correlation_id="c1") + md2 = AvroEventMetadata(service_name="svc", service_version="1", user_id="u2", correlation_id="c1") e1 = Event(event_id="e1", event_type=str(EventType.USER_LOGGED_IN), event_version="1.0", timestamp=now, metadata=md1, payload={"user_id": "u1", "login_method": "password"}, aggregate_id="agg1") e2 = Event(event_id="e2", event_type=str(EventType.USER_LOGGED_IN), event_version="1.0", timestamp=now, @@ -29,11 +29,11 @@ async def test_event_service_access_and_queries(scope) -> None: # type: ignore[ await repo.store_event(e1) await repo.store_event(e2) - # get_execution_events returns [] when non-admin for different user; then admin sees + # get_execution_events returns None when non-admin for different user; then admin sees events_user = await svc.get_execution_events("agg1", "u2", UserRole.USER) assert events_user is None events_admin = await svc.get_execution_events("agg1", "admin", UserRole.ADMIN) - assert any(ev.aggregate_id == "agg1" for ev in events_admin) + 
assert any(ev.aggregate_id == "agg1" for ev in events_admin.events) # query_events_advanced: basic run (empty filters) should return a result structure res = await svc.query_events_advanced("u1", UserRole.USER, filters=EventFilter(), sort_by="correlation_id", sort_order=SortOrder.ASC) @@ -41,9 +41,9 @@ async def test_event_service_access_and_queries(scope) -> None: # type: ignore[ # get_events_by_correlation filters non-admin to their own user_id by_corr_user = await svc.get_events_by_correlation("c1", user_id="u1", user_role=UserRole.USER, include_all_users=False) - assert all(ev.metadata.user_id == "u1" for ev in by_corr_user) + assert all(ev.metadata.user_id == "u1" for ev in by_corr_user.events) by_corr_admin = await svc.get_events_by_correlation("c1", user_id="admin", user_role=UserRole.ADMIN, include_all_users=True) - assert len(by_corr_admin) >= 2 + assert len(by_corr_admin.events) >= 2 # get_event_statistics (time window) _ = await svc.get_event_statistics("u1", UserRole.USER, start_time=now - timedelta(days=1), end_time=now + timedelta(days=1)) diff --git a/backend/tests/integration/test_sse_routes.py b/backend/tests/integration/test_sse_routes.py index c6c297a9..1078259c 100644 --- a/backend/tests/integration/test_sse_routes.py +++ b/backend/tests/integration/test_sse_routes.py @@ -9,7 +9,7 @@ from app.domain.enums.notification import NotificationSeverity, NotificationStatus from app.schemas_pydantic.sse import RedisNotificationMessage, SSEHealthResponse from app.infrastructure.kafka.events.pod import PodCreatedEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.services.sse.redis_bus import SSERedisBus from app.services.sse.sse_service import SSEService from tests.helpers.eventually import eventually @@ -131,7 +131,7 @@ async def _connected() -> None: execution_id=exec_id, pod_name=f"executor-{exec_id}", namespace="default", - metadata=EventMetadata(service_name="tests", service_version="1"), + metadata=AvroEventMetadata(service_name="tests", service_version="1"), ) await bus.publish_event(exec_id, ev) diff --git a/backend/tests/unit/db/repositories/test_admin_events_repository.py b/backend/tests/unit/db/repositories/test_admin_events_repository.py index 3e6dab67..e574e439 100644 --- a/backend/tests/unit/db/repositories/test_admin_events_repository.py +++ b/backend/tests/unit/db/repositories/test_admin_events_repository.py @@ -7,7 +7,7 @@ from app.domain.admin.replay_updates import ReplaySessionUpdate from app.domain.enums.replay import ReplayStatus from app.domain.events.event_models import EventFields, EventFilter, EventStatistics, Event -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata pytestmark = pytest.mark.unit @@ -21,8 +21,8 @@ def repo(db) -> AdminEventsRepository: # type: ignore[valid-type] async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type] now = datetime.now(timezone.utc) await db.get_collection("events").insert_many([ - {EventFields.EVENT_ID: "e1", EventFields.EVENT_TYPE: "X", EventFields.TIMESTAMP: now, EventFields.METADATA: EventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()}, - {EventFields.EVENT_ID: "e2", EventFields.EVENT_TYPE: "X", EventFields.TIMESTAMP: now, EventFields.METADATA: EventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()}, + 
{EventFields.EVENT_ID: "e1", EventFields.EVENT_TYPE: "X", EventFields.TIMESTAMP: now, EventFields.METADATA: AvroEventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()}, + {EventFields.EVENT_ID: "e2", EventFields.EVENT_TYPE: "X", EventFields.TIMESTAMP: now, EventFields.METADATA: AvroEventMetadata(service_name="svc", service_version="1", correlation_id="c1").to_dict()}, ]) res = await repo.browse_events(EventFilter()) assert res.total >= 2 @@ -37,12 +37,12 @@ async def test_browse_detail_delete_and_export(repo: AdminEventsRepository, db) async def test_event_stats_and_archive(repo: AdminEventsRepository, db) -> None: # type: ignore[valid-type] now = datetime.now(timezone.utc) await db.get_collection("events").insert_many([ - {EventFields.EVENT_ID: "e10", EventFields.EVENT_TYPE: "step.completed", EventFields.TIMESTAMP: now, EventFields.METADATA: EventMetadata(service_name="svc", service_version="1", user_id="u1").to_dict()}, + {EventFields.EVENT_ID: "e10", EventFields.EVENT_TYPE: "step.completed", EventFields.TIMESTAMP: now, EventFields.METADATA: AvroEventMetadata(service_name="svc", service_version="1", user_id="u1").to_dict()}, ]) await db.get_collection("executions").insert_one({"created_at": now, "status": "completed", "resource_usage": {"execution_time_wall_seconds": 1.25}}) stats = await repo.get_event_stats(hours=1) assert isinstance(stats, EventStatistics) - ev = Event(event_id="a1", event_type="X", event_version="1.0", timestamp=now, metadata=EventMetadata(service_name="s", service_version="1"), payload={}) + ev = Event(event_id="a1", event_type="X", event_version="1.0", timestamp=now, metadata=AvroEventMetadata(service_name="s", service_version="1"), payload={}) assert await repo.archive_event(ev, deleted_by="admin") is True diff --git a/backend/tests/unit/db/repositories/test_event_repository.py b/backend/tests/unit/db/repositories/test_event_repository.py index 66559488..5fe402f4 100644 --- a/backend/tests/unit/db/repositories/test_event_repository.py +++ b/backend/tests/unit/db/repositories/test_event_repository.py @@ -4,7 +4,7 @@ from app.db.repositories.event_repository import EventRepository from app.domain.events.event_models import Event, EventFields, EventFilter -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata pytestmark = pytest.mark.unit @@ -20,7 +20,7 @@ def make_event(event_id: str, etype: str = "UserLoggedIn", user: str | None = "u event_type=etype, event_version="1.0", timestamp=datetime.now(timezone.utc), - metadata=EventMetadata(service_name="svc", service_version="1", user_id=user, correlation_id="c1"), + metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id=user, correlation_id="c1"), payload={"k": 1, "execution_id": agg} if agg else {"k": 1}, aggregate_id=agg, ) @@ -41,18 +41,18 @@ async def test_store_get_and_queries(repo: EventRepository, db) -> None: # type by_agg = await repo.get_events_by_aggregate("x2") assert any(ev.event_id == "e2" for ev in by_agg) by_corr = await repo.get_events_by_correlation("c1") - assert len(by_corr) >= 2 + assert len(by_corr.events) >= 2 by_user = await repo.get_events_by_user("u1", limit=10) assert len(by_user) >= 2 exec_events = await repo.get_execution_events("x1") - assert any(ev.event_id == "e1" for ev in exec_events) + assert any(ev.event_id == "e1" for ev in exec_events.events) @pytest.mark.asyncio async def test_statistics_and_search_and_delete(repo: EventRepository, db) -> None: # 
type: ignore[valid-type] now = datetime.now(timezone.utc) await db.get_collection("events").insert_many([ - {EventFields.EVENT_ID: "e3", EventFields.EVENT_TYPE: "C", EventFields.EVENT_VERSION: "1.0", EventFields.TIMESTAMP: now, EventFields.METADATA: EventMetadata(service_name="svc", service_version="1").to_dict(), EventFields.PAYLOAD: {}}, + {EventFields.EVENT_ID: "e3", EventFields.EVENT_TYPE: "C", EventFields.EVENT_VERSION: "1.0", EventFields.TIMESTAMP: now, EventFields.METADATA: AvroEventMetadata(service_name="svc", service_version="1").to_dict(), EventFields.PAYLOAD: {}}, ]) stats = await repo.get_event_statistics(start_time=now - timedelta(days=1), end_time=now + timedelta(days=1)) assert stats.total_events >= 1 diff --git a/backend/tests/unit/dlq/test_dlq_models.py b/backend/tests/unit/dlq/test_dlq_models.py index 16497625..b1104c79 100644 --- a/backend/tests/unit/dlq/test_dlq_models.py +++ b/backend/tests/unit/dlq/test_dlq_models.py @@ -14,7 +14,7 @@ ) from app.domain.enums.events import EventType from app.events.schema.schema_registry import SchemaRegistryManager -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.user import UserLoggedInEvent from app.infrastructure.mappers.dlq_mapper import DLQMapper @@ -25,7 +25,7 @@ def _make_event() -> UserLoggedInEvent: return UserLoggedInEvent( user_id="u1", login_method=LoginMethod.PASSWORD, - metadata=EventMetadata(service_name="svc", service_version="1"), + metadata=AvroEventMetadata(service_name="svc", service_version="1"), ) diff --git a/backend/tests/unit/events/test_metadata_model.py b/backend/tests/unit/events/test_metadata_model.py index 38de1732..94afa349 100644 --- a/backend/tests/unit/events/test_metadata_model.py +++ b/backend/tests/unit/events/test_metadata_model.py @@ -1,15 +1,36 @@ -from app.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata -def test_metadata_helpers(): - m = EventMetadata(service_name="s", service_version="1") +def test_to_dict() -> None: + m = AvroEventMetadata(service_name="svc", service_version="1.0") d = m.to_dict() - assert d["service_name"] == "s" - m2 = EventMetadata.from_dict({"service_name": "a", "service_version": "2", "user_id": "u"}) - assert m2.user_id == "u" - m3 = m.with_correlation("cid") - assert m3.correlation_id == "cid" - m4 = m.with_user("u2") - assert m4.user_id == "u2" - assert m.ensure_correlation_id().correlation_id + assert d["service_name"] == "svc" + assert d["service_version"] == "1.0" + + +def test_from_dict() -> None: + m = AvroEventMetadata.from_dict({"service_name": "a", "service_version": "2", "user_id": "u"}) + assert m.service_name == "a" + assert m.service_version == "2" + assert m.user_id == "u" + + +def test_with_correlation() -> None: + m = AvroEventMetadata(service_name="svc", service_version="1") + m2 = m.with_correlation("cid") + assert m2.correlation_id == "cid" + assert m2.service_name == m.service_name # preserves other fields + +def test_with_user() -> None: + m = AvroEventMetadata(service_name="svc", service_version="1") + m2 = m.with_user("u1") + assert m2.user_id == "u1" + + +def test_ensure_correlation_id() -> None: + m = AvroEventMetadata(service_name="svc", service_version="1") + # ensure_correlation_id returns self if correlation_id already present + same = m.ensure_correlation_id() + assert same.correlation_id == m.correlation_id + assert 
m.ensure_correlation_id().correlation_id diff --git a/backend/tests/unit/events/test_metadata_model_min.py b/backend/tests/unit/events/test_metadata_model_min.py deleted file mode 100644 index dc816879..00000000 --- a/backend/tests/unit/events/test_metadata_model_min.py +++ /dev/null @@ -1,14 +0,0 @@ -from app.events.metadata import EventMetadata - - -def test_event_metadata_helpers(): - m = EventMetadata(service_name="svc", service_version="1") - # ensure_correlation_id returns self if present - same = m.ensure_correlation_id() - assert same.correlation_id == m.correlation_id - # with_correlation and with_user create copies - m2 = m.with_correlation("cid") - assert m2.correlation_id == "cid" and m2.service_name == m.service_name - m3 = m.with_user("u1") - assert m3.user_id == "u1" - diff --git a/backend/tests/unit/events/test_schema_registry_manager.py b/backend/tests/unit/events/test_schema_registry_manager.py index c378c8b0..458b2323 100644 --- a/backend/tests/unit/events/test_schema_registry_manager.py +++ b/backend/tests/unit/events/test_schema_registry_manager.py @@ -2,7 +2,7 @@ from app.events.schema.schema_registry import SchemaRegistryManager from app.infrastructure.kafka.events.execution import ExecutionRequestedEvent -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.pod import PodCreatedEvent diff --git a/backend/tests/unit/infrastructure/mappers/test_admin_mapper.py b/backend/tests/unit/infrastructure/mappers/test_admin_mapper.py index f8daef48..a63a6c2c 100644 --- a/backend/tests/unit/infrastructure/mappers/test_admin_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_admin_mapper.py @@ -1,7 +1,6 @@ -import pytest from datetime import datetime, timezone -from app.infrastructure.mappers import AuditLogMapper, SettingsMapper, UserListResultMapper, UserMapper +import pytest from app.domain.admin import ( AuditAction, AuditLogEntry, @@ -11,10 +10,10 @@ SystemSettings, ) from app.domain.user import User as DomainAdminUser -from app.domain.user import UserListResult, UserRole, UserUpdate, UserCreation +from app.domain.user import UserCreation, UserRole, UserUpdate +from app.infrastructure.mappers import AuditLogMapper, SettingsMapper, UserMapper from app.schemas_pydantic.user import User as ServiceUser - pytestmark = pytest.mark.unit @@ -39,9 +38,6 @@ def test_user_mapper_roundtrip_and_validation() -> None: back = UserMapper.from_mongo_document(doc) assert back.user_id == user.user_id and back.email == user.email - resp = UserMapper.to_response_dict(user) - assert isinstance(resp["created_at"], float) and isinstance(resp["updated_at"], float) - # invalid email doc_bad = {**doc, "email": "bad"} with pytest.raises(ValueError): @@ -77,18 +73,6 @@ def test_user_mapper_from_service_and_update_dict() -> None: assert "created_at" in cdict and "updated_at" in cdict -def test_user_list_result_mapper() -> None: - users = [ - DomainAdminUser( - user_id="u1", username="a", email="a@example.com", role=UserRole.USER, - is_active=True, is_superuser=False, hashed_password="h", created_at=_now(), updated_at=_now() - ) - ] - res = UserListResult(users=users, total=1, offset=0, limit=10) - out = UserListResultMapper.to_dict(res) - assert out["total"] == 1 and len(out["users"]) == 1 - - def test_settings_mapper_roundtrip_defaults_and_custom() -> None: # defaults exec_limits = SettingsMapper.execution_limits_from_dict(None) diff --git 
a/backend/tests/unit/infrastructure/mappers/test_dlq_mapper.py b/backend/tests/unit/infrastructure/mappers/test_dlq_mapper.py index a7a7aaf7..185c320c 100644 --- a/backend/tests/unit/infrastructure/mappers/test_dlq_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_dlq_mapper.py @@ -6,13 +6,11 @@ import pytest from app.dlq.models import ( - DLQBatchRetryResult, DLQFields, DLQMessage, DLQMessageFilter, DLQMessageStatus, DLQMessageUpdate, - DLQRetryResult, ) from app.domain.enums.events import EventType from app.infrastructure.mappers.dlq_mapper import DLQMapper @@ -286,61 +284,6 @@ def test_from_kafka_message_minimal(self, sample_event): assert msg.dlq_partition is None assert msg.headers == {} - def test_to_response_dict(self, sample_dlq_message): - """Test converting DLQ message to response dictionary.""" - result = DLQMapper.to_response_dict(sample_dlq_message) - - assert result["event_id"] == "event-123" - assert result["event_type"] == sample_dlq_message.event_type - assert result["event"] == sample_dlq_message.event.to_dict() - assert result["original_topic"] == "execution-events" - assert result["error"] == "Test error" - assert result["retry_count"] == 2 - assert result["status"] == DLQMessageStatus.PENDING - assert result["producer_id"] == "test-producer" - assert result["dlq_offset"] == 100 - assert result["dlq_partition"] == 1 - assert result["last_error"] == "Connection timeout" - assert result["discard_reason"] == "Max retries exceeded" - assert "age_seconds" in result - assert "failed_at" in result - assert "next_retry_at" in result - assert "retried_at" in result - assert "discarded_at" in result - - def test_retry_result_to_dict_success(self): - """Test converting successful retry result to dictionary.""" - result = DLQRetryResult(event_id="event-123", status="success") - - d = DLQMapper.retry_result_to_dict(result) - - assert d == {"event_id": "event-123", "status": "success"} - - def test_retry_result_to_dict_with_error(self): - """Test converting retry result with error to dictionary.""" - result = DLQRetryResult(event_id="event-123", status="failed", error="Connection error") - - d = DLQMapper.retry_result_to_dict(result) - - assert d == {"event_id": "event-123", "status": "failed", "error": "Connection error"} - - def test_batch_retry_result_to_dict(self): - """Test converting batch retry result to dictionary.""" - details = [ - DLQRetryResult(event_id="event-1", status="success"), - DLQRetryResult(event_id="event-2", status="failed", error="Error"), - ] - result = DLQBatchRetryResult(total=2, successful=1, failed=1, details=details) - - d = DLQMapper.batch_retry_result_to_dict(result) - - assert d["total"] == 2 - assert d["successful"] == 1 - assert d["failed"] == 1 - assert len(d["details"]) == 2 - assert d["details"][0] == {"event_id": "event-1", "status": "success"} - assert d["details"][1] == {"event_id": "event-2", "status": "failed", "error": "Error"} - def test_from_failed_event(self, sample_event): """Test creating DLQ message from failed event.""" msg = DLQMapper.from_failed_event( diff --git a/backend/tests/unit/infrastructure/mappers/test_event_mapper_extended.py b/backend/tests/unit/infrastructure/mappers/test_event_mapper_extended.py index 68dde2f4..04189d28 100644 --- a/backend/tests/unit/infrastructure/mappers/test_event_mapper_extended.py +++ b/backend/tests/unit/infrastructure/mappers/test_event_mapper_extended.py @@ -3,44 +3,25 @@ from datetime import datetime, timezone import pytest - from app.domain.events.event_models import ( 
ArchivedEvent, Event, - EventBrowseResult, - EventDetail, - EventExportRow, EventFields, EventFilter, - EventListResult, - EventProjection, - EventReplayInfo, - EventStatistics, - EventSummary, - HourlyEventCount, - UserEventCount, ) -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.mappers.event_mapper import ( ArchivedEventMapper, - EventBrowseResultMapper, - EventDetailMapper, EventExportRowMapper, EventFilterMapper, - EventListResultMapper, EventMapper, - EventProjectionMapper, - EventReplayInfoMapper, - EventStatisticsMapper, - EventSummaryMapper, ) -from app.schemas_pydantic.admin_events import EventFilter as AdminEventFilter @pytest.fixture def sample_metadata(): """Create sample event metadata.""" - return EventMetadata( + return AvroEventMetadata( service_name="test-service", service_version="1.0.0", correlation_id="corr-123", @@ -75,7 +56,7 @@ def minimal_event(): event_type="minimal.event", event_version="1.0", timestamp=datetime.now(timezone.utc), - metadata=EventMetadata(service_name="minimal-service", service_version="1.0.0"), + metadata=AvroEventMetadata(service_name="minimal-service", service_version="1.0.0"), payload={}, ) @@ -91,7 +72,6 @@ def test_to_mongo_document_with_all_fields(self, sample_event): assert doc[EventFields.EVENT_TYPE] == "test.event" assert doc[EventFields.EVENT_VERSION] == "2.0" assert doc[EventFields.TIMESTAMP] == sample_event.timestamp - assert doc[EventFields.METADATA] == sample_event.metadata.to_dict() assert doc[EventFields.PAYLOAD] == {"key": "value", "nested": {"data": 123}} assert doc[EventFields.AGGREGATE_ID] == "agg-456" assert doc[EventFields.STORED_AT] == sample_event.stored_at @@ -111,180 +91,6 @@ def test_to_mongo_document_minimal(self, minimal_event): assert EventFields.STATUS not in doc assert EventFields.ERROR not in doc - def test_to_dict_with_all_fields(self, sample_event): - """Test converting event to dictionary with all optional fields.""" - result = EventMapper.to_dict(sample_event) - - assert result["event_id"] == "event-123" - assert result["event_type"] == "test.event" - assert result["event_version"] == "2.0" - assert result["aggregate_id"] == "agg-456" - assert result["correlation_id"] == "corr-123" - assert result["stored_at"] == sample_event.stored_at - assert result["ttl_expires_at"] == sample_event.ttl_expires_at - assert result["status"] == "processed" - assert result["error"] == "Some error occurred" - - def test_to_dict_minimal(self, minimal_event): - """Test converting minimal event to dictionary.""" - result = EventMapper.to_dict(minimal_event) - - assert result["event_id"] == "event-minimal" - assert result["event_type"] == "minimal.event" - assert "aggregate_id" not in result - # correlation_id is auto-generated by EventMetadata - assert "correlation_id" in result - assert "stored_at" not in result - assert "ttl_expires_at" not in result - assert "status" not in result - assert "error" not in result - - -class TestEventSummaryMapper: - """Test EventSummaryMapper with all branches.""" - - def test_to_dict_with_aggregate_id(self): - """Test converting summary with aggregate_id.""" - summary = EventSummary( - event_id="event-123", - event_type="test.event", - timestamp=datetime.now(timezone.utc), - aggregate_id="agg-456", - ) - - result = EventSummaryMapper.to_dict(summary) - - assert result[EventFields.EVENT_ID] == "event-123" - assert result[EventFields.EVENT_TYPE] == "test.event" - assert 
result[EventFields.AGGREGATE_ID] == "agg-456" - - def test_to_dict_without_aggregate_id(self): - """Test converting summary without aggregate_id.""" - summary = EventSummary( - event_id="event-456", - event_type="test.event", - timestamp=datetime.now(timezone.utc), - aggregate_id=None, - ) - - result = EventSummaryMapper.to_dict(summary) - - assert result[EventFields.EVENT_ID] == "event-456" - assert EventFields.AGGREGATE_ID not in result - - -class TestEventStatisticsMapper: - """Test EventStatisticsMapper with all branches.""" - - def test_to_dict_with_times(self): - """Test converting statistics with start and end times.""" - stats = EventStatistics( - total_events=1000, - events_by_type={"type1": 500, "type2": 500}, - events_by_service={"service1": 600, "service2": 400}, - events_by_hour=[ - HourlyEventCount(hour="2024-01-15T10:00:00", count=100), - HourlyEventCount(hour="2024-01-15T11:00:00", count=150), - ], - top_users=[ - UserEventCount(user_id="user1", event_count=50), - UserEventCount(user_id="user2", event_count=40), - ], - error_rate=0.05, - avg_processing_time=1.5, - start_time=datetime(2024, 1, 15, 0, 0, 0, tzinfo=timezone.utc), - end_time=datetime(2024, 1, 15, 23, 59, 59, tzinfo=timezone.utc), - ) - - result = EventStatisticsMapper.to_dict(stats) - - assert result["total_events"] == 1000 - assert result["start_time"] == stats.start_time - assert result["end_time"] == stats.end_time - - def test_to_dict_without_times(self): - """Test converting statistics without start and end times.""" - stats = EventStatistics( - total_events=500, - events_by_type={}, - events_by_service={}, - events_by_hour=[], - top_users=[], - error_rate=0.0, - avg_processing_time=0.0, - start_time=None, - end_time=None, - ) - - result = EventStatisticsMapper.to_dict(stats) - - assert result["total_events"] == 500 - assert "start_time" not in result - assert "end_time" not in result - - def test_to_dict_with_dict_hourly_counts(self): - """Test converting statistics with dictionary hourly counts.""" - stats = EventStatistics( - total_events=100, - events_by_type={}, - events_by_service={}, - events_by_hour=[ - {"hour": "2024-01-15T10:00:00", "count": 50}, # Dict format - HourlyEventCount(hour="2024-01-15T11:00:00", count=50), # Object format - ], - top_users=[], - error_rate=0.0, - avg_processing_time=0.0, - ) - - result = EventStatisticsMapper.to_dict(stats) - - assert len(result["events_by_hour"]) == 2 - assert result["events_by_hour"][0] == {"hour": "2024-01-15T10:00:00", "count": 50} - assert result["events_by_hour"][1] == {"hour": "2024-01-15T11:00:00", "count": 50} - - -class TestEventProjectionMapper: - """Test EventProjectionMapper with all branches.""" - - def test_to_dict_with_all_fields(self): - """Test converting projection with all optional fields.""" - projection = EventProjection( - name="test-projection", - pipeline=[{"$match": {"event_type": "test"}}], - output_collection="test_output", - refresh_interval_seconds=60, - description="Test projection description", - source_events=["event1", "event2"], - last_updated=datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc), - ) - - result = EventProjectionMapper.to_dict(projection) - - assert result["name"] == "test-projection" - assert result["description"] == "Test projection description" - assert result["source_events"] == ["event1", "event2"] - assert result["last_updated"] == projection.last_updated - - def test_to_dict_minimal(self): - """Test converting minimal projection.""" - projection = EventProjection( - name="minimal-projection", - 
pipeline=[], - output_collection="output", - refresh_interval_seconds=30, - description=None, - source_events=None, - last_updated=None, - ) - - result = EventProjectionMapper.to_dict(projection) - - assert result["name"] == "minimal-projection" - assert "description" not in result - assert "source_events" not in result - assert "last_updated" not in result - class TestArchivedEventMapper: """Test ArchivedEventMapper with all branches.""" @@ -463,4 +269,4 @@ def test_from_event_minimal(self, minimal_event): assert row.user_id == "" assert row.service == "minimal-service" assert row.status == "" - assert row.error == "" \ No newline at end of file + assert row.error == "" diff --git a/backend/tests/unit/infrastructure/mappers/test_execution_api_mapper.py b/backend/tests/unit/infrastructure/mappers/test_execution_api_mapper.py deleted file mode 100644 index 50740204..00000000 --- a/backend/tests/unit/infrastructure/mappers/test_execution_api_mapper.py +++ /dev/null @@ -1,226 +0,0 @@ -"""Tests for execution API mapper.""" - -import pytest - -from app.domain.enums.execution import ExecutionStatus -from app.domain.enums.storage import ExecutionErrorType -from app.domain.execution import DomainExecution, ResourceUsageDomain -from app.infrastructure.mappers.execution_api_mapper import ExecutionApiMapper - - -@pytest.fixture -def sample_execution(): - """Create a sample domain execution.""" - return DomainExecution( - execution_id="exec-123", - status=ExecutionStatus.COMPLETED, - lang="python", - lang_version="3.11", - stdout="Hello, World!", - stderr="", - exit_code=0, - error_type=None, - resource_usage=ResourceUsageDomain( - execution_time_wall_seconds=1.5, - cpu_time_jiffies=150, - clk_tck_hertz=100, - peak_memory_kb=2048, - ), - ) - - -class TestExecutionApiMapper: - """Test execution API mapper.""" - - def test_to_response(self, sample_execution): - """Test converting domain execution to response.""" - response = ExecutionApiMapper.to_response(sample_execution) - - assert response.execution_id == "exec-123" - assert response.status == ExecutionStatus.COMPLETED - - def test_to_response_minimal(self): - """Test converting minimal domain execution to response.""" - execution = DomainExecution( - execution_id="exec-456", - status=ExecutionStatus.RUNNING, - ) - - response = ExecutionApiMapper.to_response(execution) - - assert response.execution_id == "exec-456" - assert response.status == ExecutionStatus.RUNNING - - def test_to_result_with_resource_usage(self, sample_execution): - """Test converting domain execution to result with resource usage.""" - result = ExecutionApiMapper.to_result(sample_execution) - - assert result.execution_id == "exec-123" - assert result.status == ExecutionStatus.COMPLETED - assert result.stdout == "Hello, World!" 
- assert result.stderr == "" - assert result.lang == "python" - assert result.lang_version == "3.11" - assert result.exit_code == 0 - assert result.error_type is None - assert result.resource_usage is not None - assert result.resource_usage.execution_time_wall_seconds == 1.5 - assert result.resource_usage.cpu_time_jiffies == 150 - assert result.resource_usage.clk_tck_hertz == 100 - assert result.resource_usage.peak_memory_kb == 2048 - - def test_to_result_without_resource_usage(self): - """Test converting domain execution to result without resource usage.""" - execution = DomainExecution( - execution_id="exec-789", - status=ExecutionStatus.FAILED, - lang="javascript", - lang_version="20", - stdout="", - stderr="Error occurred", - exit_code=1, - error_type=ExecutionErrorType.SCRIPT_ERROR, - resource_usage=None, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.execution_id == "exec-789" - assert result.status == ExecutionStatus.FAILED - assert result.stdout == "" - assert result.stderr == "Error occurred" - assert result.lang == "javascript" - assert result.lang_version == "20" - assert result.exit_code == 1 - assert result.error_type == ExecutionErrorType.SCRIPT_ERROR - assert result.resource_usage is None - - def test_to_result_with_script_error(self): - """Test converting domain execution with script error.""" - execution = DomainExecution( - execution_id="exec-001", - status=ExecutionStatus.FAILED, - error_type=ExecutionErrorType.SCRIPT_ERROR, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.error_type == ExecutionErrorType.SCRIPT_ERROR - - def test_to_result_with_timeout_error(self): - """Test converting domain execution with timeout error.""" - execution = DomainExecution( - execution_id="exec-002", - status=ExecutionStatus.FAILED, - error_type=ExecutionErrorType.TIMEOUT, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.error_type == ExecutionErrorType.TIMEOUT - - def test_to_result_with_resource_limit_error(self): - """Test converting domain execution with resource limit error.""" - execution = DomainExecution( - execution_id="exec-003", - status=ExecutionStatus.FAILED, - error_type=ExecutionErrorType.RESOURCE_LIMIT, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.error_type == ExecutionErrorType.RESOURCE_LIMIT - - def test_to_result_with_system_error(self): - """Test converting domain execution with system error.""" - execution = DomainExecution( - execution_id="exec-004", - status=ExecutionStatus.FAILED, - error_type=ExecutionErrorType.SYSTEM_ERROR, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.error_type == ExecutionErrorType.SYSTEM_ERROR - - def test_to_result_with_permission_denied_error(self): - """Test converting domain execution with permission denied error.""" - execution = DomainExecution( - execution_id="exec-005", - status=ExecutionStatus.FAILED, - error_type=ExecutionErrorType.PERMISSION_DENIED, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.error_type == ExecutionErrorType.PERMISSION_DENIED - - def test_to_result_with_no_error_type(self): - """Test converting domain execution with no error type.""" - execution = DomainExecution( - execution_id="exec-006", - status=ExecutionStatus.COMPLETED, - error_type=None, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.error_type is None - - def test_to_result_minimal(self): - """Test converting minimal domain execution to result.""" - execution = 
DomainExecution( - execution_id="exec-minimal", - status=ExecutionStatus.QUEUED, - lang="python", # Required field in ExecutionResult - lang_version="3.11", # Required field in ExecutionResult - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.execution_id == "exec-minimal" - assert result.status == ExecutionStatus.QUEUED - assert result.stdout is None - assert result.stderr is None - assert result.lang == "python" - assert result.lang_version == "3.11" - assert result.exit_code is None - assert result.error_type is None - assert result.resource_usage is None - - def test_to_result_all_fields_populated(self): - """Test converting fully populated domain execution to result.""" - resource_usage = ResourceUsageDomain( - execution_time_wall_seconds=2.5, - cpu_time_jiffies=250, - clk_tck_hertz=100, - peak_memory_kb=4096, - ) - - execution = DomainExecution( - execution_id="exec-full", - status=ExecutionStatus.COMPLETED, - lang="python", - lang_version="3.11", - stdout="Success output", - stderr="Debug info", - exit_code=0, - error_type=None, - resource_usage=resource_usage, - ) - - result = ExecutionApiMapper.to_result(execution) - - assert result.execution_id == "exec-full" - assert result.status == ExecutionStatus.COMPLETED - assert result.stdout == "Success output" - assert result.stderr == "Debug info" - assert result.lang == "python" - assert result.lang_version == "3.11" - assert result.exit_code == 0 - assert result.error_type is None - assert result.resource_usage is not None - assert result.resource_usage.execution_time_wall_seconds == 2.5 - assert result.resource_usage.cpu_time_jiffies == 250 - assert result.resource_usage.clk_tck_hertz == 100 - assert result.resource_usage.peak_memory_kb == 4096 \ No newline at end of file diff --git a/backend/tests/unit/infrastructure/mappers/test_infra_event_mapper.py b/backend/tests/unit/infrastructure/mappers/test_infra_event_mapper.py index 17f993a3..77211da6 100644 --- a/backend/tests/unit/infrastructure/mappers/test_infra_event_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_infra_event_mapper.py @@ -1,28 +1,15 @@ from datetime import datetime, timezone import pytest - from app.domain.events.event_models import ( Event, - EventBrowseResult, - EventListResult, - EventProjection, - EventReplayInfo, - EventStatistics, EventSummary, - HourlyEventCount, ) -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.mappers import ( ArchivedEventMapper, - EventBrowseResultMapper, - EventDetailMapper, EventExportRowMapper, - EventListResultMapper, EventMapper, - EventProjectionMapper, - EventReplayInfoMapper, - EventStatisticsMapper, EventSummaryMapper, ) @@ -35,7 +22,7 @@ def _event(eid: str = "e1") -> Event: event_type="X", event_version="1.0", timestamp=datetime.now(timezone.utc), - metadata=EventMetadata(service_name="svc", service_version="1", user_id="u1"), + metadata=AvroEventMetadata(service_name="svc", service_version="1", user_id="u1"), payload={"k": 1}, aggregate_id="agg", status="ok", @@ -43,7 +30,7 @@ def _event(eid: str = "e1") -> Event: ) -def test_event_mapper_to_from_mongo_and_dict() -> None: +def test_event_mapper_to_from_mongo() -> None: ev = _event() doc = EventMapper.to_mongo_document(ev) assert doc["event_id"] == ev.event_id and doc["payload"]["k"] == 1 @@ -53,48 +40,26 @@ def test_event_mapper_to_from_mongo_and_dict() -> None: back = EventMapper.from_mongo_document(mongo_doc) assert 
back.payload.get("custom") == 123 - d = EventMapper.to_dict(ev) - assert d["event_id"] == ev.event_id and d["metadata"]["service_name"] == "svc" - - # from_dict - ev2 = EventMapper.from_dict(d | {"correlation_id": "c"}) - assert ev2.event_id == ev.event_id - -def test_summary_detail_list_browse_and_stats_mappers() -> None: +def test_summary_mapper() -> None: e = _event() - summary = EventSummary(event_id=e.event_id, event_type=e.event_type, timestamp=e.timestamp, - aggregate_id=e.aggregate_id) - sd = EventSummaryMapper.to_dict(summary) + summary = EventSummary( + event_id=e.event_id, event_type=e.event_type, timestamp=e.timestamp, aggregate_id=e.aggregate_id + ) s2 = EventSummaryMapper.from_mongo_document( - {"event_id": summary.event_id, "event_type": summary.event_type, "timestamp": summary.timestamp}) + {"event_id": summary.event_id, "event_type": summary.event_type, "timestamp": summary.timestamp} + ) assert s2.event_id == summary.event_id - detail_dict = EventDetailMapper.to_dict( - type("D", (), {"event": e, "related_events": [summary], "timeline": [summary]})()) - assert "event" in detail_dict and len(detail_dict["related_events"]) == 1 - - lres = EventListResult(events=[e], total=1, skip=0, limit=10, has_more=False) - assert EventListResultMapper.to_dict(lres)["total"] == 1 - - bres = EventBrowseResult(events=[e], total=1, skip=0, limit=10) - assert EventBrowseResultMapper.to_dict(bres)["skip"] == 0 - - stats = EventStatistics(total_events=3, events_by_hour=[HourlyEventCount(hour="h", count=1)]) - sd = EventStatisticsMapper.to_dict(stats) - assert sd["total_events"] == 3 and isinstance(sd["events_by_hour"][0], dict) - - -def test_projection_archived_export_replayinfo() -> None: - proj = EventProjection(name="p", pipeline=[{"$match": {}}], output_collection="out", description="d") - pd = EventProjectionMapper.to_dict(proj) - assert pd["name"] == "p" and pd["description"] == "d" +def test_archived_export_mapper() -> None: e = _event() arch = ArchivedEventMapper.from_event(e, deleted_by="admin", deletion_reason="r") assert arch.deleted_by == "admin" arch_doc = ArchivedEventMapper.to_mongo_document(arch) - assert "_deleted_at" in arch_doc or "_deletion_reason" in arch_doc or True # enum names vary + assert "_deleted_at" in arch_doc + assert "_deleted_by" in arch_doc + assert "_deletion_reason" in arch_doc row = type("Row", (), {})() row.event_id = e.event_id @@ -108,7 +73,3 @@ def test_projection_archived_export_replayinfo() -> None: row.error = e.error or "" ed = EventExportRowMapper.to_dict(row) assert ed["Event ID"] == e.event_id - - info = EventReplayInfo(events=[e], event_count=1, event_types=["X"], start_time=e.timestamp, end_time=e.timestamp) - infod = EventReplayInfoMapper.to_dict(info) - assert infod["event_count"] == 1 and len(infod["events"]) == 1 diff --git a/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper.py b/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper.py index ea6d100c..3a1ed0ce 100644 --- a/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper.py @@ -1,19 +1,16 @@ from datetime import datetime, timedelta, timezone import pytest - from app.domain.rate_limit.rate_limit_models import ( EndpointGroup, RateLimitAlgorithm, RateLimitConfig, RateLimitRule, - RateLimitStatus, UserRateLimit, ) from app.infrastructure.mappers import ( RateLimitConfigMapper, RateLimitRuleMapper, - RateLimitStatusMapper, UserRateLimitMapper, ) @@ -36,7 +33,7 @@ def 
test_user_rate_limit_mapper_roundtrip_and_dates() -> None: assert u2.user_id == "u1" and len(u2.rules) == 1 and isinstance(u2.created_at, datetime) # from string timestamps - d["created_at"] = now.isoformat(); + d["created_at"] = now.isoformat() d["updated_at"] = (now + timedelta(seconds=1)).isoformat() u3 = UserRateLimitMapper.from_dict(d) assert u3.created_at <= u3.updated_at @@ -53,9 +50,3 @@ def test_config_mapper_roundtrip_and_json() -> None: js = RateLimitConfigMapper.model_dump_json(cfg) c3 = RateLimitConfigMapper.model_validate_json(js) assert isinstance(c3, RateLimitConfig) and c3.global_enabled is False - - -def test_status_mapper_to_dict() -> None: - s = RateLimitStatus(allowed=True, limit=10, remaining=5, reset_at=datetime.now(timezone.utc)) - d = RateLimitStatusMapper.to_dict(s) - assert d["allowed"] is True and d["limit"] == 10 diff --git a/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper_extended.py b/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper_extended.py index c3b5e757..72363534 100644 --- a/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper_extended.py +++ b/backend/tests/unit/infrastructure/mappers/test_rate_limit_mapper_extended.py @@ -3,19 +3,16 @@ from datetime import datetime, timezone import pytest - from app.domain.rate_limit import ( EndpointGroup, RateLimitAlgorithm, RateLimitConfig, RateLimitRule, - RateLimitStatus, UserRateLimit, ) from app.infrastructure.mappers.rate_limit_mapper import ( RateLimitConfigMapper, RateLimitRuleMapper, - RateLimitStatusMapper, UserRateLimitMapper, ) @@ -322,42 +319,3 @@ def test_model_dump_json(self): assert data["default_rules"][0]["endpoint_pattern"] == "/test" assert data["global_enabled"] is True assert data["redis_ttl"] == 3600 - - -class TestRateLimitStatusMapper: - """Test RateLimitStatusMapper.""" - - def test_to_dict(self): - """Test converting RateLimitStatus to dict using asdict.""" - status = RateLimitStatus( - allowed=True, - limit=100, - remaining=75, - reset_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - retry_after=None, - ) - - result = RateLimitStatusMapper.to_dict(status) - - assert result["allowed"] is True - assert result["limit"] == 100 - assert result["remaining"] == 75 - assert result["reset_at"] == status.reset_at - assert result["retry_after"] is None - - def test_to_dict_with_retry_after(self): - """Test converting RateLimitStatus with retry_after set.""" - status = RateLimitStatus( - allowed=False, - limit=100, - remaining=0, - reset_at=datetime(2024, 1, 1, 12, 5, 0, tzinfo=timezone.utc), - retry_after=300, # 5 minutes - ) - - result = RateLimitStatusMapper.to_dict(status) - - assert result["allowed"] is False - assert result["limit"] == 100 - assert result["remaining"] == 0 - assert result["retry_after"] == 300 \ No newline at end of file diff --git a/backend/tests/unit/infrastructure/mappers/test_replay_api_mapper.py b/backend/tests/unit/infrastructure/mappers/test_replay_api_mapper.py index 81bd0829..e21d307b 100644 --- a/backend/tests/unit/infrastructure/mappers/test_replay_api_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_replay_api_mapper.py @@ -3,9 +3,8 @@ from datetime import datetime, timezone import pytest - from app.domain.enums.events import EventType -from app.domain.enums.replay import ReplayStatus, ReplayType, ReplayTarget +from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType from app.domain.replay import ReplayConfig, ReplayFilter, ReplaySessionState from 
app.infrastructure.mappers.replay_api_mapper import ReplayApiMapper from app.schemas_pydantic.replay import ReplayRequest @@ -389,4 +388,4 @@ def test_cleanup_to_response(self): ) assert response.removed_sessions == 5 - assert response.message == "Cleaned up 5 old sessions" \ No newline at end of file + assert response.message == "Cleaned up 5 old sessions" diff --git a/backend/tests/unit/infrastructure/mappers/test_replay_mapper.py b/backend/tests/unit/infrastructure/mappers/test_replay_mapper.py index 20a54740..08942ea3 100644 --- a/backend/tests/unit/infrastructure/mappers/test_replay_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_replay_mapper.py @@ -1,16 +1,13 @@ from datetime import datetime, timezone import pytest - from app.domain.admin import ( ReplayQuery, ReplaySession, ReplaySessionStatusDetail, ) -from app.domain.admin import ReplaySessionData from app.domain.enums.replay import ReplayStatus -from app.domain.events.event_models import EventSummary -from app.infrastructure.mappers import ReplayQueryMapper, ReplaySessionDataMapper, ReplaySessionMapper +from app.infrastructure.mappers import ReplayQueryMapper, ReplaySessionMapper pytestmark = pytest.mark.unit @@ -52,9 +49,3 @@ def test_replay_query_mapper() -> None: assert "timestamp" in mq2 and "$gte" in mq2["timestamp"] and "$lte" in mq2["timestamp"] -def test_replay_session_data_mapper() -> None: - es = [EventSummary(event_id="e1", event_type="X", timestamp=datetime.now(timezone.utc))] - data = ReplaySessionData(total_events=1, replay_correlation_id="rc", dry_run=True, query={"x": 1}, - events_preview=es) - dd = ReplaySessionDataMapper.to_dict(data) - assert dd["dry_run"] is True and len(dd.get("events_preview", [])) == 1 diff --git a/backend/tests/unit/infrastructure/mappers/test_replay_mapper_extended.py b/backend/tests/unit/infrastructure/mappers/test_replay_mapper_extended.py index 083e78ee..1c16328b 100644 --- a/backend/tests/unit/infrastructure/mappers/test_replay_mapper_extended.py +++ b/backend/tests/unit/infrastructure/mappers/test_replay_mapper_extended.py @@ -1,25 +1,20 @@ """Extended tests for replay mapper to achieve 95%+ coverage.""" from datetime import datetime, timezone -from typing import Any import pytest - from app.domain.admin import ( ReplayQuery, ReplaySession, - ReplaySessionData, ReplaySessionStatusDetail, ReplaySessionStatusInfo, ) from app.domain.enums.events import EventType from app.domain.enums.replay import ReplayStatus, ReplayTarget, ReplayType -from app.domain.events.event_models import EventSummary from app.domain.replay import ReplayConfig, ReplayFilter, ReplaySessionState from app.infrastructure.mappers.replay_mapper import ( ReplayApiMapper, ReplayQueryMapper, - ReplaySessionDataMapper, ReplaySessionMapper, ReplayStateMapper, ) @@ -224,77 +219,6 @@ def test_to_mongodb_query_empty(self): assert result == {} -class TestReplaySessionDataMapper: - """Extended tests for ReplaySessionDataMapper.""" - - def test_to_dict_without_events_preview(self): - """Test converting data without events preview.""" - data = ReplaySessionData( - dry_run=False, # Not dry run - total_events=50, - replay_correlation_id="replay-corr-123", - query={"status": "completed"}, - events_preview=None, - ) - - result = ReplaySessionDataMapper.to_dict(data) - - assert result["dry_run"] is False - assert result["total_events"] == 50 - assert result["replay_correlation_id"] == "replay-corr-123" - assert result["query"] == {"status": "completed"} - assert "events_preview" not in result - - def 
test_to_dict_dry_run_without_preview(self): - """Test dry run but no events preview.""" - data = ReplaySessionData( - dry_run=True, - total_events=20, - replay_correlation_id="dry-corr-456", - query={"type": "test"}, - events_preview=None, # No preview even though dry run - ) - - result = ReplaySessionDataMapper.to_dict(data) - - assert result["dry_run"] is True - assert "events_preview" not in result - - def test_to_dict_with_events_preview(self): - """Test converting data with events preview.""" - events = [ - EventSummary( - event_id="event-1", - event_type="type-1", - timestamp=datetime(2024, 1, 1, 10, 0, 0, tzinfo=timezone.utc), - aggregate_id="agg-1", - ), - EventSummary( - event_id="event-2", - event_type="type-2", - timestamp=datetime(2024, 1, 1, 10, 1, 0, tzinfo=timezone.utc), - aggregate_id=None, # No aggregate_id - ), - ] - - data = ReplaySessionData( - dry_run=True, - total_events=2, - replay_correlation_id="preview-corr", - query={}, - events_preview=events, - ) - - result = ReplaySessionDataMapper.to_dict(data) - - assert result["dry_run"] is True - assert len(result["events_preview"]) == 2 - assert result["events_preview"][0]["event_id"] == "event-1" - assert result["events_preview"][0]["aggregate_id"] == "agg-1" - assert result["events_preview"][1]["event_id"] == "event-2" - assert result["events_preview"][1]["aggregate_id"] is None - - class TestReplayApiMapper: """Tests for ReplayApiMapper.""" @@ -491,4 +415,4 @@ def test_from_mongo_document_with_enum_status(self): state = ReplayStateMapper.from_mongo_document(doc) - assert state.status == ReplayStatus.FAILED \ No newline at end of file + assert state.status == ReplayStatus.FAILED diff --git a/backend/tests/unit/infrastructure/mappers/test_saga_mapper.py b/backend/tests/unit/infrastructure/mappers/test_saga_mapper.py index c8f37b44..6800c08f 100644 --- a/backend/tests/unit/infrastructure/mappers/test_saga_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_saga_mapper.py @@ -1,15 +1,12 @@ from datetime import datetime, timezone import pytest - from app.domain.enums.saga import SagaState from app.domain.saga.models import Saga, SagaFilter, SagaInstance from app.infrastructure.mappers import ( - SagaEventMapper, SagaFilterMapper, SagaInstanceMapper, SagaMapper, - SagaResponseMapper, ) pytestmark = pytest.mark.unit @@ -29,23 +26,12 @@ def _saga() -> Saga: ) -def test_saga_mapper_to_from_mongo_and_dict() -> None: +def test_saga_mapper_to_from_mongo() -> None: s = _saga() m = SagaMapper() doc = m.to_mongo(s) s2 = m.from_mongo({**doc}) assert s2.saga_id == s.saga_id and s2.state == s.state - d = m.to_dict(s) - assert d["state"] == SagaState.RUNNING.value and isinstance(d["created_at"], str) - - -def test_saga_response_mapper() -> None: - s = _saga() - rm = SagaResponseMapper() - resp = rm.to_response(s) - assert resp.saga_id == s.saga_id - lst = rm.list_to_responses([s]) - assert len(lst) == 1 and lst[0].saga_id == s.saga_id def test_saga_instance_mapper_roundtrip_and_clean_context() -> None: @@ -74,12 +60,6 @@ def test_saga_instance_mapper_roundtrip_and_clean_context() -> None: assert inst3.state == SagaState.CREATED -def test_saga_event_mapper_to_cancelled_event() -> None: - inst = SagaInstance(saga_name="demo", execution_id="e1", context_data={"user_id": "u1"}, error_message="e") - ev = SagaEventMapper.to_cancelled_event(inst) - assert ev.cancelled_by == "u1" and ev.reason == "e" - - def test_saga_filter_mapper_to_query() -> None: f = SagaFilter(state=SagaState.COMPLETED, execution_ids=["e1"], saga_name="demo", 
error_status=False) fq = SagaFilterMapper().to_mongodb_query(f) diff --git a/backend/tests/unit/infrastructure/mappers/test_saga_mapper_extended.py b/backend/tests/unit/infrastructure/mappers/test_saga_mapper_extended.py index ad5c93c6..bd28091d 100644 --- a/backend/tests/unit/infrastructure/mappers/test_saga_mapper_extended.py +++ b/backend/tests/unit/infrastructure/mappers/test_saga_mapper_extended.py @@ -1,21 +1,15 @@ """Extended tests for saga mapper to achieve 95%+ coverage.""" from datetime import datetime, timezone -from typing import Any import pytest - from app.domain.enums.saga import SagaState from app.domain.saga.models import Saga, SagaFilter, SagaInstance -from app.infrastructure.kafka.events.metadata import EventMetadata from app.infrastructure.mappers.saga_mapper import ( - SagaEventMapper, SagaFilterMapper, SagaInstanceMapper, SagaMapper, - SagaResponseMapper, ) -from app.schemas_pydantic.saga import SagaStatusResponse @pytest.fixture @@ -102,21 +96,6 @@ def test_to_mongo_with_non_dict_context(self): # Should return the non-dict value as-is (line 38 checks isinstance) assert doc["context_data"] == "not a dict" - def test_to_dict_without_completed_at(self): - """Test to_dict when completed_at is None.""" - saga = Saga( - saga_id="saga-003", - saga_name="incomplete", - execution_id="exec-003", - state=SagaState.RUNNING, - completed_at=None, # Not completed - ) - - mapper = SagaMapper() - result = mapper.to_dict(saga) - - assert result["completed_at"] is None - def test_from_instance(self, sample_saga_instance): """Test converting SagaInstance to Saga.""" mapper = SagaMapper() @@ -272,52 +251,6 @@ def test_to_mongo_with_state_without_value_attr(self): assert doc["state"] == "MOCK_STATE" -class TestSagaEventMapper: - """Extended tests for SagaEventMapper.""" - - def test_to_cancelled_event_with_user_id_param(self, sample_saga_instance): - """Test cancelled event with user_id parameter.""" - event = SagaEventMapper.to_cancelled_event( - sample_saga_instance, - user_id="param-user", - service_name="test-service", - service_version="2.0.0", - ) - - assert event.cancelled_by == "param-user" - assert event.metadata.user_id == "param-user" - assert event.metadata.service_name == "test-service" - assert event.metadata.service_version == "2.0.0" - - def test_to_cancelled_event_from_context(self): - """Test cancelled event taking user_id from context_data.""" - instance = SagaInstance( - saga_name="test", - execution_id="exec-129", - context_data={"user_id": "context-user"}, - error_message="Context error", - ) - - event = SagaEventMapper.to_cancelled_event(instance) - - assert event.cancelled_by == "context-user" - assert event.reason == "Context error" - - def test_to_cancelled_event_default_system(self): - """Test cancelled event defaulting to 'system' when no user_id.""" - instance = SagaInstance( - saga_name="test", - execution_id="exec-130", - context_data={}, # No user_id - error_message=None, # No error message - ) - - event = SagaEventMapper.to_cancelled_event(instance) - - assert event.cancelled_by == "system" - assert event.reason == "User requested cancellation" # Default reason - - class TestSagaFilterMapper: """Extended tests for SagaFilterMapper.""" @@ -392,49 +325,3 @@ def test_to_mongodb_query_empty_filter(self): query = mapper.to_mongodb_query(filter_obj) assert query == {} - - -class TestSagaResponseMapper: - """Extended tests for SagaResponseMapper.""" - - def test_to_response_with_none_completed_at(self): - """Test response mapping when completed_at is None.""" - 
saga = Saga( - saga_id="saga-200", - saga_name="incomplete", - execution_id="exec-200", - state=SagaState.RUNNING, - completed_at=None, - ) - - mapper = SagaResponseMapper() - response = mapper.to_response(saga) - - assert response.saga_id == "saga-200" - assert response.completed_at is None - - def test_list_to_responses_empty(self): - """Test converting empty list of sagas.""" - mapper = SagaResponseMapper() - responses = mapper.list_to_responses([]) - - assert responses == [] - - def test_list_to_responses_multiple(self): - """Test converting multiple sagas to responses.""" - sagas = [ - Saga( - saga_id=f"saga-{i}", - saga_name="test", - execution_id=f"exec-{i}", - state=SagaState.COMPLETED, - ) - for i in range(3) - ] - - mapper = SagaResponseMapper() - responses = mapper.list_to_responses(sagas) - - assert len(responses) == 3 - assert all(isinstance(r, SagaStatusResponse) for r in responses) - assert [r.saga_id for r in responses] == ["saga-0", "saga-1", "saga-2"] \ No newline at end of file diff --git a/backend/tests/unit/infrastructure/mappers/test_saved_script_mapper.py b/backend/tests/unit/infrastructure/mappers/test_saved_script_mapper.py index 62f12a2f..018684de 100644 --- a/backend/tests/unit/infrastructure/mappers/test_saved_script_mapper.py +++ b/backend/tests/unit/infrastructure/mappers/test_saved_script_mapper.py @@ -5,9 +5,7 @@ from uuid import UUID import pytest - from app.domain.saved_script.models import ( - DomainSavedScript, DomainSavedScriptCreate, DomainSavedScriptUpdate, ) @@ -258,4 +256,4 @@ def test_from_mongo_document_partial_string_fields(self): with pytest.raises(TypeError) as exc_info: SavedScriptMapper.from_mongo_document(doc) - assert "missing" in str(exc_info.value).lower() \ No newline at end of file + assert "missing" in str(exc_info.value).lower() diff --git a/backend/tests/unit/schemas_pydantic/test_events_schemas.py b/backend/tests/unit/schemas_pydantic/test_events_schemas.py index 6f647fc1..f3121cbb 100644 --- a/backend/tests/unit/schemas_pydantic/test_events_schemas.py +++ b/backend/tests/unit/schemas_pydantic/test_events_schemas.py @@ -5,7 +5,7 @@ from app.domain.enums.common import SortOrder from app.domain.enums.events import EventType -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.schemas_pydantic.events import ( EventAggregationRequest, EventBase, @@ -37,7 +37,7 @@ def test_event_filter_request_sort_validator_rejects_invalid(): def test_event_base_and_in_db_defaults_and_metadata(): - meta = EventMetadata(service_name="tests", service_version="1.0", user_id="u1") + meta = AvroEventMetadata(service_name="tests", service_version="1.0", user_id="u1") ev = EventBase( event_type=EventType.EXECUTION_REQUESTED, metadata=meta, diff --git a/backend/tests/unit/services/pod_monitor/test_event_mapper.py b/backend/tests/unit/services/pod_monitor/test_event_mapper.py index b280d73f..0dcb35e8 100644 --- a/backend/tests/unit/services/pod_monitor/test_event_mapper.py +++ b/backend/tests/unit/services/pod_monitor/test_event_mapper.py @@ -2,7 +2,7 @@ import pytest from app.domain.enums.storage import ExecutionErrorType -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.services.pod_monitor.event_mapper import PodContext, PodEventMapper from tests.helpers.k8s_fakes import ( Meta, @@ -21,7 +21,7 @@ def _ctx(pod: Pod, event_type: str = "ADDED") -> PodContext: - return 
PodContext(pod=pod, execution_id="e1", metadata=EventMetadata(service_name="t", service_version="1"), phase=pod.status.phase or "", event_type=event_type) + return PodContext(pod=pod, execution_id="e1", metadata=AvroEventMetadata(service_name="t", service_version="1"), phase=pod.status.phase or "", event_type=event_type) def test_pending_running_and_succeeded_mapping() -> None: diff --git a/backend/tests/unit/services/saga/test_saga_step_and_base.py b/backend/tests/unit/services/saga/test_saga_step_and_base.py index c3c670b5..76e4dcc9 100644 --- a/backend/tests/unit/services/saga/test_saga_step_and_base.py +++ b/backend/tests/unit/services/saga/test_saga_step_and_base.py @@ -37,7 +37,7 @@ async def compensate(self, context: SagaContext) -> bool: # noqa: ARG002 @pytest.mark.asyncio async def test_context_adders() -> None: - from app.infrastructure.kafka.events.metadata import EventMetadata + from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.base import BaseEvent from app.domain.enums.events import EventType @@ -46,7 +46,7 @@ class E(BaseEvent): topic = None # type: ignore[assignment] ctx = SagaContext("s1", "e1") - evt = E(metadata=EventMetadata(service_name="t", service_version="1")) + evt = E(metadata=AvroEventMetadata(service_name="t", service_version="1")) ctx.add_event(evt) assert len(ctx.events) == 1 comp = _DummyComp() diff --git a/backend/tests/unit/services/test_pod_builder.py b/backend/tests/unit/services/test_pod_builder.py index de97031a..cd271631 100644 --- a/backend/tests/unit/services/test_pod_builder.py +++ b/backend/tests/unit/services/test_pod_builder.py @@ -3,7 +3,7 @@ import pytest from kubernetes import client as k8s_client -from app.infrastructure.kafka.events.metadata import EventMetadata +from app.infrastructure.kafka.events.metadata import AvroEventMetadata from app.infrastructure.kafka.events.saga import CreatePodCommandEvent from app.services.k8s_worker.config import K8sWorkerConfig from app.services.k8s_worker.pod_builder import PodBuilder @@ -41,7 +41,7 @@ def create_pod_command(self) -> CreatePodCommandEvent: cpu_limit="1000m", memory_limit="1Gi", priority=5, - metadata=EventMetadata( + metadata=AvroEventMetadata( user_id=str(uuid4()), correlation_id=str(uuid4()), service_name="test-service", @@ -155,7 +155,7 @@ def test_container_resources_defaults( cpu_limit="", memory_limit="", priority=5, - metadata=EventMetadata( + metadata=AvroEventMetadata( service_name="svc", service_version="1", user_id=str(uuid4()), @@ -288,7 +288,7 @@ def test_pod_timeout_default( cpu_limit="500m", memory_limit="512Mi", priority=5, - metadata=EventMetadata(user_id=str(uuid4()), service_name="t", service_version="1") + metadata=AvroEventMetadata(user_id=str(uuid4()), service_name="t", service_version="1") ) pod = pod_builder.build_pod_manifest(command) @@ -345,7 +345,7 @@ def test_pod_labels_truncation( cpu_request="50m", memory_request="64Mi", priority=5, - metadata=EventMetadata( + metadata=AvroEventMetadata( service_name="svc", service_version="1", user_id=long_id, @@ -402,7 +402,7 @@ def test_different_languages( cpu_limit="200m", memory_limit="256Mi", priority=5, - metadata=EventMetadata(user_id=str(uuid4()), service_name="t", service_version="1") + metadata=AvroEventMetadata(user_id=str(uuid4()), service_name="t", service_version="1") ) pod = pod_builder.build_pod_manifest(cmd) diff --git a/docs/testing/kafka-test-stability.md b/docs/testing/kafka-test-stability.md new file mode 100644 index 00000000..a7effd75 --- 
/dev/null
+++ b/docs/testing/kafka-test-stability.md
@@ -0,0 +1,86 @@
+# Kafka test stability
+
+## The problem
+
+When running tests in parallel (e.g., with `pytest-xdist`), you might encounter sporadic crashes with messages like:
+
+```text
+Fatal Python error: Aborted
+```
+
+The stack trace typically points to `confluent_kafka` operations, often during producer initialization in fixtures or test setup. This isn't a bug in the application code - it's a known race condition in the underlying `librdkafka` C library.
+
+## Why it happens
+
+The `confluent-kafka-python` library is a thin wrapper around `librdkafka`, a high-performance C library. When multiple Python processes or threads try to create Kafka `Producer` instances simultaneously, they can trigger a race condition in `librdkafka`'s internal initialization routines.
+
+This manifests as:
+
+- Random `SIGABRT` signals during test runs
+- Crashes in `rd_kafka_broker_destroy_final` or similar internal functions
+- Flaky CI failures that pass on retry
+
+The issue is particularly common in CI environments where tests run in parallel across multiple workers.
+
+## The fix
+
+The solution is to serialize `Producer` initialization using a global threading lock. This prevents multiple threads from entering `librdkafka`'s initialization code simultaneously.
+
+In `app/events/core/producer.py`:
+
+```python
+import threading
+
+# Global lock to serialize Producer initialization (workaround for librdkafka race condition)
+# See: https://github.com/confluentinc/confluent-kafka-python/issues/1797
+_producer_init_lock = threading.Lock()
+
+class UnifiedProducer:
+    async def start(self) -> None:
+        # ... config setup ...
+
+        # Serialize Producer initialization to prevent librdkafka race condition
+        with _producer_init_lock:
+            self._producer = Producer(producer_config)
+
+        # ... rest of startup ...
+```
+
+The lock is process-global, so all `UnifiedProducer` instances in the same process will serialize their initialization. This adds negligible overhead in production (producers are typically created once at startup) while eliminating the race condition in tests.
+
+## Related issues
+
+These GitHub issues document the underlying problem:
+
+| Issue | Description |
+|-------|-------------|
+| [confluent-kafka-python#1797](https://github.com/confluentinc/confluent-kafka-python/issues/1797) | Segfaults in multithreaded/asyncio pytest environments |
+| [confluent-kafka-python#1761](https://github.com/confluentinc/confluent-kafka-python/issues/1761) | Segfault on garbage collection in multithreaded context |
+| [librdkafka#3608](https://github.com/confluentinc/librdkafka/issues/3608) | Crash in `rd_kafka_broker_destroy_final` |
+
+## Alternative approaches
+
+If you still encounter issues:
+
+1. **Reduce parallelism** - Run Kafka-dependent tests with fewer workers: `pytest -n 2` instead of `-n auto`
+
+2. **Isolate Kafka tests** - Mark Kafka tests and run them separately:
+   ```python
+   @pytest.mark.kafka
+   def test_producer_sends_message():
+       ...
+   ```
+   ```bash
+   pytest -m "not kafka" -n auto  # parallel
+   pytest -m kafka -n 1           # sequential
+   ```
+
+3. **Use fixtures carefully** - Ensure producer fixtures are properly scoped and cleaned up:
+   ```python
+   @pytest.fixture(scope="function")
+   async def producer():
+       p = UnifiedProducer(config, schema_registry)
+       await p.start()
+       yield p
+       await p.stop()  # Always clean up
+   ```
diff --git a/mkdocs.yml b/mkdocs.yml
index c43b7ff6..d46b76f9 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -139,3 +139,4 @@ nav:
   - Testing:
     - Load Testing: testing/load-testing.md
     - Frontend Testing: testing/frontend-testing.md
+    - Kafka Test Stability: testing/kafka-test-stability.md
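
A practical footnote to the `@pytest.mark.kafka` approach suggested in the new doc: pytest warns on unknown marks (and fails under `--strict-markers`), so the custom marker needs to be registered before `-m kafka` / `-m "not kafka"` selection is reliable. A minimal sketch of one way to do that, assuming registration happens in a shared `conftest.py` (the file location and marker description here are illustrative, not part of this change):

```python
# conftest.py (illustrative sketch) - register the custom "kafka" marker so that
# `pytest -m kafka` and `pytest -m "not kafka"` select tests without warnings.
def pytest_configure(config):
    config.addinivalue_line(
        "markers",
        "kafka: tests that exercise a real Kafka producer/consumer; run with low parallelism",
    )
```

An equivalent `markers = [...]` entry in `pytest.ini` or `pyproject.toml` works just as well; the only requirement is that the marker exists before the split test runs.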