Skip to content

Commit 0b1ef49

Browse files
authored
chore: frontend updates (#44)
* userSettings simplifications
* lang info types + updated frontend + type hints for all languages except Python in the code editor
* loading state fix
* double opt-in for notifications
* unified theme setting (removed setThemeLocal)
* csrf fix: local storage -> session storage
* icons lib instead of hardcoded svgs
* editor page refactoring
* tests for new notifications; keyboard navigation support in editor; smaller fixes
* api interceptors as simpler functions
* refactoring of admin pages + tests
* relative imports -> imports with $; also fixed warnings during build
1 parent 2dd9e1e commit 0b1ef49

File tree

157 files changed

+7615
-4917
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

157 files changed

+7615
-4917
lines changed

backend/app/api/routes/admin/events.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ async def browse_events(request: EventBrowseRequest, service: FromDishka[AdminEv
4343
event_filter = EventFilterMapper.from_admin_pydantic(request.filters)
4444

4545
result = await service.browse_events(
46-
filter=event_filter,
46+
event_filter=event_filter,
4747
skip=request.skip,
4848
limit=request.limit,
4949
sort_by=request.sort_by,
@@ -92,7 +92,7 @@ async def export_events_csv(
9292
end_time=end_time,
9393
)
9494
)
95-
result = await service.export_events_csv_content(filter=export_filter, limit=limit)
95+
result = await service.export_events_csv_content(event_filter=export_filter, limit=limit)
9696
return StreamingResponse(
9797
iter([result.content]),
9898
media_type=result.media_type,
@@ -128,7 +128,7 @@ async def export_events_json(
128128
end_time=end_time,
129129
)
130130
)
131-
result = await service.export_events_json_content(filter=export_filter, limit=limit)
131+
result = await service.export_events_json_content(event_filter=export_filter, limit=limit)
132132
return StreamingResponse(
133133
iter([result.content]),
134134
media_type=result.media_type,

backend/app/api/routes/dlq.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
from app.dlq import RetryPolicy
1111
from app.dlq.manager import DLQManager
1212
from app.dlq.models import DLQMessageStatus
13+
from app.domain.enums.events import EventType
1314
from app.schemas_pydantic.dlq import (
1415
DLQBatchRetryResponse,
1516
DLQMessageDetail,
@@ -50,7 +51,7 @@ async def get_dlq_messages(
5051
repository: FromDishka[DLQRepository],
5152
status: DLQMessageStatus | None = Query(None),
5253
topic: str | None = None,
53-
event_type: str | None = None,
54+
event_type: EventType | None = Query(None),
5455
limit: int = Query(50, ge=1, le=1000),
5556
offset: int = Query(0, ge=0),
5657
) -> DLQMessagesResponse:

backend/app/api/routes/events.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -325,7 +325,7 @@ async def replay_aggregate_events(
325325
user_id=admin.user_id,
326326
)
327327
await kafka_event_service.publish_event(
328-
event_type=f"replay.{event.event_type}",
328+
event_type=event.event_type,
329329
payload=event.payload,
330330
aggregate_id=aggregate_id,
331331
correlation_id=replay_correlation_id,

backend/app/api/routes/execution.py

Lines changed: 5 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,8 @@
1010
from app.core.exceptions import IntegrationException
1111
from app.core.tracing import EventAttributes, add_span_attributes
1212
from app.core.utils import get_client_ip
13-
from app.domain.enums.common import ErrorType
1413
from app.domain.enums.events import EventType
1514
from app.domain.enums.execution import ExecutionStatus
16-
from app.domain.enums.storage import ExecutionErrorType
1715
from app.domain.enums.user import UserRole
1816
from app.infrastructure.kafka.events.base import BaseEvent
1917
from app.infrastructure.kafka.events.metadata import EventMetadata
@@ -30,7 +28,6 @@
3028
ExecutionResponse,
3129
ExecutionResult,
3230
ResourceLimits,
33-
ResourceUsage,
3431
RetryExecutionRequest,
3532
)
3633
from app.schemas_pydantic.user import UserResponse
@@ -54,35 +51,7 @@ async def get_execution_with_access(
5451
if domain_exec.user_id and domain_exec.user_id != current_user.user_id and current_user.role != UserRole.ADMIN:
5552
raise HTTPException(status_code=403, detail="Access denied")
5653

57-
# Map domain to Pydantic for dependency consumer
58-
ru = None
59-
if domain_exec.resource_usage is not None:
60-
ru = ResourceUsage(**vars(domain_exec.resource_usage))
61-
# Map error_type to public ErrorType in API model via mapper rules
62-
error_type = (
63-
(
64-
ErrorType.SCRIPT_ERROR
65-
if domain_exec.error_type == ExecutionErrorType.SCRIPT_ERROR
66-
else ErrorType.SYSTEM_ERROR
67-
)
68-
if domain_exec.error_type is not None
69-
else None
70-
)
71-
return ExecutionInDB(
72-
execution_id=domain_exec.execution_id,
73-
script=domain_exec.script,
74-
status=domain_exec.status,
75-
stdout=domain_exec.stdout,
76-
stderr=domain_exec.stderr,
77-
lang=domain_exec.lang,
78-
lang_version=domain_exec.lang_version,
79-
resource_usage=ru,
80-
user_id=domain_exec.user_id,
81-
exit_code=domain_exec.exit_code,
82-
error_type=error_type,
83-
created_at=domain_exec.created_at,
84-
updated_at=domain_exec.updated_at,
85-
)
54+
return ExecutionInDB.model_validate(domain_exec)
8655

8756

8857
@router.post("/execute", response_model=ExecutionResponse)
@@ -268,24 +237,14 @@ async def retry_execution(
268237
async def get_execution_events(
269238
execution: Annotated[ExecutionInDB, Depends(get_execution_with_access)],
270239
event_service: FromDishka[EventService],
271-
event_types: str | None = Query(None, description="Comma-separated event types to filter"),
240+
event_types: list[EventType] | None = Query(None, description="Event types to filter"),
272241
limit: int = Query(100, ge=1, le=1000),
273242
) -> list[ExecutionEventResponse]:
274243
"""Get all events for an execution."""
275-
event_type_list = None
276-
if event_types:
277-
event_type_list = [t.strip() for t in event_types.split(",")]
278-
279244
events = await event_service.get_events_by_aggregate(
280-
aggregate_id=execution.execution_id, event_types=event_type_list, limit=limit
245+
aggregate_id=execution.execution_id, event_types=event_types, limit=limit
281246
)
282-
283-
return [
284-
ExecutionEventResponse(
285-
event_id=event.event_id, event_type=event.event_type, timestamp=event.timestamp, payload=event.payload
286-
)
287-
for event in events
288-
]
247+
return [ExecutionEventResponse.model_validate(e) for e in events]
289248

290249

291250
@router.get("/user/executions", response_model=ExecutionListResponse)
@@ -336,7 +295,7 @@ async def get_k8s_resource_limits(
336295
) -> ResourceLimits:
337296
try:
338297
limits = await execution_service.get_k8s_resource_limits()
339-
return ResourceLimits(**vars(limits))
298+
return ResourceLimits.model_validate(limits)
340299
except Exception as e:
341300
raise HTTPException(status_code=500, detail="Failed to retrieve resource limits") from e
342301

backend/app/db/repositories/admin/admin_events_repository.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,14 +58,14 @@ def __init__(self, db: Database):
5858

5959
async def browse_events(
6060
self,
61-
filter: EventFilter,
61+
event_filter: EventFilter,
6262
skip: int = 0,
6363
limit: int = 50,
6464
sort_by: str = EventFields.TIMESTAMP,
6565
sort_order: int = SortDirection.DESCENDING,
6666
) -> EventBrowseResult:
6767
"""Browse events with filters using domain models."""
68-
query = EventFilterMapper.to_mongo_query(filter)
68+
query = EventFilterMapper.to_mongo_query(event_filter)
6969

7070
# Get total count
7171
total = await self.events_collection.count_documents(query)
@@ -191,9 +191,9 @@ async def get_event_stats(self, hours: int = 24) -> EventStatistics:
191191

192192
return statistics
193193

194-
async def export_events_csv(self, filter: EventFilter) -> List[EventExportRow]:
194+
async def export_events_csv(self, event_filter: EventFilter) -> List[EventExportRow]:
195195
"""Export events as CSV data."""
196-
query = EventFilterMapper.to_mongo_query(filter)
196+
query = EventFilterMapper.to_mongo_query(event_filter)
197197

198198
cursor = self.events_collection.find(query).sort(EventFields.TIMESTAMP, SortDirection.DESCENDING).limit(10000)
199199

backend/app/db/repositories/dlq_repository.py

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
TopicStatistic,
1919
)
2020
from app.dlq.manager import DLQManager
21+
from app.domain.enums.events import EventType
2122
from app.domain.events.event_models import CollectionNames
2223
from app.infrastructure.mappers.dlq_mapper import DLQMapper
2324

@@ -105,18 +106,14 @@ async def get_dlq_stats(self) -> DLQStatistics:
105106

106107
async def get_messages(
107108
self,
108-
status: str | None = None,
109+
status: DLQMessageStatus | None = None,
109110
topic: str | None = None,
110-
event_type: str | None = None,
111+
event_type: EventType | None = None,
111112
limit: int = 50,
112113
offset: int = 0,
113114
) -> DLQMessageListResult:
114-
# Create filter
115-
filter = DLQMessageFilter(
116-
status=DLQMessageStatus(status) if status else None, topic=topic, event_type=event_type
117-
)
118-
119-
query = DLQMapper.filter_to_query(filter)
115+
msg_filter = DLQMessageFilter(status=status, topic=topic, event_type=event_type)
116+
query = DLQMapper.filter_to_query(msg_filter)
120117
total_count = await self.dlq_collection.count_documents(query)
121118

122119
cursor = self.dlq_collection.find(query).sort(DLQFields.FAILED_AT, -1).skip(offset).limit(limit)

backend/app/db/repositories/event_repository.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from app.core.logging import logger
1010
from app.core.tracing import EventAttributes
1111
from app.core.tracing.utils import add_span_attributes
12+
from app.domain.enums.events import EventType
1213
from app.domain.enums.user import UserRole
1314
from app.domain.events import (
1415
ArchivedEvent,
@@ -114,11 +115,11 @@ async def get_events_by_type(
114115
return [self.mapper.from_mongo_document(doc) for doc in docs]
115116

116117
async def get_events_by_aggregate(
117-
self, aggregate_id: str, event_types: list[str] | None = None, limit: int = 100
118+
self, aggregate_id: str, event_types: list[EventType] | None = None, limit: int = 100
118119
) -> list[Event]:
119120
query: dict[str, Any] = {EventFields.AGGREGATE_ID: aggregate_id}
120121
if event_types:
121-
query[EventFields.EVENT_TYPE] = {"$in": event_types}
122+
query[EventFields.EVENT_TYPE] = {"$in": [t.value for t in event_types]}
122123

123124
cursor = self._collection.find(query).sort(EventFields.TIMESTAMP, ASCENDING).limit(limit)
124125
docs = await cursor.to_list(length=limit)

backend/app/db/repositories/replay_repository.py

Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
from app.core.database_context import Collection, Database
66
from app.core.logging import logger
77
from app.domain.admin.replay_updates import ReplaySessionUpdate
8+
from app.domain.enums.replay import ReplayStatus
89
from app.domain.events.event_models import CollectionNames
910
from app.domain.replay import ReplayFilter, ReplaySessionState
1011
from app.infrastructure.mappers import ReplayStateMapper
@@ -42,13 +43,13 @@ async def get_session(self, session_id: str) -> ReplaySessionState | None:
4243
return self._mapper.from_mongo_document(data) if data else None
4344

4445
async def list_sessions(
45-
self, status: str | None = None, user_id: str | None = None, limit: int = 100, skip: int = 0
46+
self, status: ReplayStatus | None = None, user_id: str | None = None, limit: int = 100, skip: int = 0
4647
) -> list[ReplaySessionState]:
4748
collection = self.replay_collection
4849

49-
query = {}
50+
query: dict[str, object] = {}
5051
if status:
51-
query["status"] = status
52+
query["status"] = status.value
5253
if user_id:
5354
query["config.filter.user_id"] = user_id
5455

@@ -58,15 +59,20 @@ async def list_sessions(
5859
sessions.append(self._mapper.from_mongo_document(doc))
5960
return sessions
6061

61-
async def update_session_status(self, session_id: str, status: str) -> bool:
62+
async def update_session_status(self, session_id: str, status: ReplayStatus) -> bool:
6263
"""Update the status of a replay session"""
63-
result = await self.replay_collection.update_one({"session_id": session_id}, {"$set": {"status": status}})
64+
result = await self.replay_collection.update_one({"session_id": session_id}, {"$set": {"status": status.value}})
6465
return result.modified_count > 0
6566

6667
async def delete_old_sessions(self, cutoff_time: str) -> int:
6768
"""Delete old completed/failed/cancelled sessions"""
69+
terminal_statuses = [
70+
ReplayStatus.COMPLETED.value,
71+
ReplayStatus.FAILED.value,
72+
ReplayStatus.CANCELLED.value,
73+
]
6874
result = await self.replay_collection.delete_many(
69-
{"created_at": {"$lt": cutoff_time}, "status": {"$in": ["completed", "failed", "cancelled"]}}
75+
{"created_at": {"$lt": cutoff_time}, "status": {"$in": terminal_statuses}}
7076
)
7177
return result.deleted_count
7278

@@ -83,16 +89,16 @@ async def update_replay_session(self, session_id: str, updates: ReplaySessionUpd
8389
result = await self.replay_collection.update_one({"session_id": session_id}, {"$set": mongo_updates})
8490
return result.modified_count > 0
8591

86-
async def count_events(self, filter: ReplayFilter) -> int:
92+
async def count_events(self, replay_filter: ReplayFilter) -> int:
8793
"""Count events matching the given filter"""
88-
query = filter.to_mongo_query()
94+
query = replay_filter.to_mongo_query()
8995
return await self.events_collection.count_documents(query)
9096

9197
async def fetch_events(
92-
self, filter: ReplayFilter, batch_size: int = 100, skip: int = 0
98+
self, replay_filter: ReplayFilter, batch_size: int = 100, skip: int = 0
9399
) -> AsyncIterator[List[Dict[str, Any]]]:
94100
"""Fetch events in batches based on filter"""
95-
query = filter.to_mongo_query()
101+
query = replay_filter.to_mongo_query()
96102
cursor = self.events_collection.find(query).sort("timestamp", 1).skip(skip)
97103

98104
batch = []

backend/app/db/repositories/saga_repository.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -46,17 +46,17 @@ async def get_saga(self, saga_id: str) -> Saga | None:
4646
doc = await self.sagas.find_one({"saga_id": saga_id})
4747
return self.mapper.from_mongo(doc) if doc else None
4848

49-
async def get_sagas_by_execution(self, execution_id: str, state: str | None = None) -> list[Saga]:
49+
async def get_sagas_by_execution(self, execution_id: str, state: SagaState | None = None) -> list[Saga]:
5050
query: dict[str, object] = {"execution_id": execution_id}
5151
if state:
52-
query["state"] = state
52+
query["state"] = state.value
5353

5454
cursor = self.sagas.find(query).sort("created_at", DESCENDING)
5555
docs = await cursor.to_list(length=None)
5656
return [self.mapper.from_mongo(doc) for doc in docs]
5757

58-
async def list_sagas(self, filter: SagaFilter, limit: int = 100, skip: int = 0) -> SagaListResult:
59-
query = self.filter_mapper.to_mongodb_query(filter)
58+
async def list_sagas(self, saga_filter: SagaFilter, limit: int = 100, skip: int = 0) -> SagaListResult:
59+
query = self.filter_mapper.to_mongodb_query(saga_filter)
6060

6161
# Get total count
6262
total = await self.sagas.count_documents(query)
@@ -69,8 +69,8 @@ async def list_sagas(self, filter: SagaFilter, limit: int = 100, skip: int = 0)
6969

7070
return SagaListResult(sagas=sagas, total=total, skip=skip, limit=limit)
7171

72-
async def update_saga_state(self, saga_id: str, state: str, error_message: str | None = None) -> bool:
73-
update_data = {"state": state, "updated_at": datetime.now(timezone.utc)}
72+
async def update_saga_state(self, saga_id: str, state: SagaState, error_message: str | None = None) -> bool:
73+
update_data: dict[str, object] = {"state": state.value, "updated_at": datetime.now(timezone.utc)}
7474

7575
if error_message:
7676
update_data["error_message"] = error_message
@@ -108,8 +108,8 @@ async def find_timed_out_sagas(
108108
docs = await cursor.to_list(length=limit)
109109
return [self.mapper.from_mongo(doc) for doc in docs]
110110

111-
async def get_saga_statistics(self, filter: SagaFilter | None = None) -> dict[str, object]:
112-
query = self.filter_mapper.to_mongodb_query(filter) if filter else {}
111+
async def get_saga_statistics(self, saga_filter: SagaFilter | None = None) -> dict[str, object]:
112+
query = self.filter_mapper.to_mongodb_query(saga_filter) if saga_filter else {}
113113

114114
# Basic counts
115115
total = await self.sagas.count_documents(query)

backend/app/db/repositories/sse_repository.py

Lines changed: 11 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,41 +1,28 @@
1+
from datetime import datetime, timezone
2+
13
from app.core.database_context import Collection, Database
4+
from app.domain.enums.execution import ExecutionStatus
25
from app.domain.events.event_models import CollectionNames
36
from app.domain.execution import DomainExecution
4-
from app.domain.sse import SSEEventDomain, SSEExecutionStatusDomain
7+
from app.domain.sse import SSEExecutionStatusDomain
58
from app.infrastructure.mappers import SSEMapper
69

710

811
class SSERepository:
9-
def __init__(self, database: Database):
12+
def __init__(self, database: Database) -> None:
1013
self.db = database
1114
self.executions_collection: Collection = self.db.get_collection(CollectionNames.EXECUTIONS)
12-
self.events_collection: Collection = self.db.get_collection(CollectionNames.EVENTS)
1315
self.mapper = SSEMapper()
1416

1517
async def get_execution_status(self, execution_id: str) -> SSEExecutionStatusDomain | None:
16-
execution = await self.executions_collection.find_one(
17-
{"execution_id": execution_id}, {"status": 1, "execution_id": 1, "_id": 0}
18-
)
19-
20-
if execution:
21-
return self.mapper.to_execution_status(execution_id, execution.get("status", "unknown"))
22-
return None
23-
24-
async def get_execution_events(self, execution_id: str, limit: int = 100, skip: int = 0) -> list[SSEEventDomain]:
25-
cursor = (
26-
self.events_collection.find({"aggregate_id": execution_id}).sort("timestamp", 1).skip(skip).limit(limit)
27-
)
28-
29-
events: list[SSEEventDomain] = []
30-
async for event in cursor:
31-
events.append(self.mapper.event_from_mongo_document(event))
32-
return events
33-
34-
async def get_execution_for_user(self, execution_id: str, user_id: str) -> DomainExecution | None:
35-
doc = await self.executions_collection.find_one({"execution_id": execution_id, "user_id": user_id})
18+
doc = await self.executions_collection.find_one({"execution_id": execution_id}, {"status": 1, "_id": 0})
3619
if not doc:
3720
return None
38-
return self.mapper.execution_from_mongo_document(doc)
21+
return SSEExecutionStatusDomain(
22+
execution_id=execution_id,
23+
status=ExecutionStatus(doc["status"]),
24+
timestamp=datetime.now(timezone.utc).isoformat(),
25+
)
3926

4027
async def get_execution(self, execution_id: str) -> DomainExecution | None:
4128
doc = await self.executions_collection.find_one({"execution_id": execution_id})

0 commit comments

Comments (0)