Skip to content

Commit 9c17556

Browse files
authored
test: added test for routers (#3)
1 parent 2416d13 commit 9c17556

File tree

10 files changed

+150
-55
lines changed

10 files changed

+150
-55
lines changed

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -178,7 +178,7 @@ cython_debug/
178178
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
179179
# and can be added to the global gitignore or merged into this file. However, if you prefer,
180180
# you could uncomment the following to ignore the entire vscode folder
181-
# .vscode/
181+
.vscode/
182182

183183
# Ruff stuff:
184184
.ruff_cache/

alembic/versions/adc8ba536dea_add_label_to_processing_jobs.py

Lines changed: 0 additions & 34 deletions
This file was deleted.

alembic/versions/3b5001cdded8_create_processing_jobs_table.py renamed to alembic/versions/b3ba8db2adef_creation_of_processing_jobs.py

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
1-
"""Create processing jobs table
1+
"""Creation of processing jobs
22
3-
Revision ID: 3b5001cdded8
3+
Revision ID: b3ba8db2adef
44
Revises:
5-
Create Date: 2025-08-11 14:50:01.851507
5+
Create Date: 2025-08-13 09:37:59.014491
66
77
"""
88
from typing import Sequence, Union
@@ -12,7 +12,7 @@
1212

1313

1414
# revision identifiers, used by Alembic.
15-
revision: str = '3b5001cdded8'
15+
revision: str = 'b3ba8db2adef'
1616
down_revision: Union[str, Sequence[str], None] = None
1717
branch_labels: Union[str, Sequence[str], None] = None
1818
depends_on: Union[str, Sequence[str], None] = None
@@ -23,19 +23,21 @@ def upgrade() -> None:
2323
# ### commands auto generated by Alembic - please adjust! ###
2424
op.create_table('processing_jobs',
2525
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
26-
sa.Column('title', sa.String(), nullable=True),
27-
sa.Column('status', sa.Enum('CREATED', 'RUNNING', 'FINISHED', 'FAILED', name='processingstatusenum'), nullable=True),
28-
sa.Column('user_id', sa.String(), nullable=True),
26+
sa.Column('title', sa.String(), nullable=False),
27+
sa.Column('label', sa.Enum('OPENEO', 'OGC_API_PROCESS', name='processtypeenum'), nullable=False),
28+
sa.Column('status', sa.Enum('CREATED', 'RUNNING', 'FINISHED', 'FAILED', name='processingstatusenum'), nullable=False),
29+
sa.Column('user_id', sa.String(), nullable=False),
2930
sa.Column('platform_job_id', sa.String(), nullable=True),
3031
sa.Column('parameters', sa.String(), nullable=True),
3132
sa.Column('result_link', sa.String(), nullable=True),
3233
sa.Column('service_record', sa.String(), nullable=True),
33-
sa.Column('created', sa.DateTime(), nullable=True),
34-
sa.Column('updated', sa.DateTime(), nullable=True),
34+
sa.Column('created', sa.DateTime(), nullable=False),
35+
sa.Column('updated', sa.DateTime(), nullable=False),
3536
sa.PrimaryKeyConstraint('id')
3637
)
3738
op.create_index(op.f('ix_processing_jobs_created'), 'processing_jobs', ['created'], unique=False)
3839
op.create_index(op.f('ix_processing_jobs_id'), 'processing_jobs', ['id'], unique=False)
40+
op.create_index(op.f('ix_processing_jobs_label'), 'processing_jobs', ['label'], unique=False)
3941
op.create_index(op.f('ix_processing_jobs_platform_job_id'), 'processing_jobs', ['platform_job_id'], unique=False)
4042
op.create_index(op.f('ix_processing_jobs_service_record'), 'processing_jobs', ['service_record'], unique=False)
4143
op.create_index(op.f('ix_processing_jobs_status'), 'processing_jobs', ['status'], unique=False)
@@ -54,6 +56,7 @@ def downgrade() -> None:
5456
op.drop_index(op.f('ix_processing_jobs_status'), table_name='processing_jobs')
5557
op.drop_index(op.f('ix_processing_jobs_service_record'), table_name='processing_jobs')
5658
op.drop_index(op.f('ix_processing_jobs_platform_job_id'), table_name='processing_jobs')
59+
op.drop_index(op.f('ix_processing_jobs_label'), table_name='processing_jobs')
5760
op.drop_index(op.f('ix_processing_jobs_id'), table_name='processing_jobs')
5861
op.drop_index(op.f('ix_processing_jobs_created'), table_name='processing_jobs')
5962
op.drop_table('processing_jobs')

app/routers/jobs_status.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,18 +13,19 @@
1313

1414
@router.get(
1515
"/jobs_status",
16-
response_model=JobsStatusResponse,
1716
tags=["Upscale Tasks", "Unit Jobs"],
1817
summary="Get a list of all upscaling tasks & processing jobs for the authenticated user",
1918
)
2019
async def jobs_status(
2120
db: Session = Depends(get_db),
2221
user: str = "foobar",
23-
):
22+
) -> JobsStatusResponse:
2423
"""
2524
Return combined list of upscaling tasks and processing jobs for the authenticated user.
2625
"""
2726
logger.debug(f"Fetching jobs list for user {user}")
27+
processing_jobs = get_processing_jobs_by_user_id(db, user)
28+
print(processing_jobs)
2829
return JobsStatusResponse(
29-
upscalingTasks=[], processingJobs=get_processing_jobs_by_user_id(db, user)
30+
upscaling_tasks=[], processing_jobs=get_processing_jobs_by_user_id(db, user)
3031
)

app/routers/unit_jobs.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,31 +15,32 @@
1515

1616
@router.post(
1717
"/unit_jobs",
18-
response_model=ProcessingJobSummary,
1918
status_code=status.HTTP_201_CREATED,
2019
tags=["Unit Jobs"],
2120
summary="Create a new processing job",
2221
)
2322
async def create_unit_job(
2423
payload: BaseJobRequest, db: Session = Depends(get_db), user: str = "foobar"
25-
):
24+
) -> ProcessingJobSummary:
2625
"""Create a new processing job with the provided data."""
2726
try:
2827
return create_processing_job(db, user, payload)
2928
except Exception as e:
30-
logger.error(f"Error creating unit job: {e}")
29+
logger.error(f"Error creating unit job for user {user}: {e}")
3130
raise HTTPException(
3231
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
3332
detail=f"An error occurred while creating the processing job: {e}",
3433
)
3534

3635

37-
@router.get("/unit_jobs/{job_id}", response_model=ProcessingJob, tags=["Unit Jobs"])
38-
async def get_job(job_id: int, db: Session = Depends(get_db), user: str = "foobar"):
36+
@router.get("/unit_jobs/{job_id}", tags=["Unit Jobs"])
37+
async def get_job(
38+
job_id: int, db: Session = Depends(get_db), user: str = "foobar"
39+
) -> ProcessingJob:
3940
job = get_processing_job_by_user_id(db, job_id, user)
4041
if not job:
4142
raise HTTPException(
4243
status_code=404,
43-
detail={"code": "NotFound", "message": "Processing job not found"},
44+
detail=f"Processing job {job_id} not found",
4445
)
4546
return job

app/schemas.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ class ProcessingStatusEnum(str, Enum):
1515
FINISHED = "finished"
1616
FAILED = "failed"
1717

18+
1819
# class TileRequest(BaseModel):
1920
# aoi: dict
2021
# grid: str
@@ -76,8 +77,8 @@ class UpscalingTaskSummary(BaseModel):
7677

7778

7879
class JobsStatusResponse(BaseModel):
79-
upscalingTasks: List[UpscalingTaskSummary] = []
80-
processingJobs: List[ProcessingJobSummary] = []
80+
upscaling_tasks: List[UpscalingTaskSummary] = []
81+
processing_jobs: List[ProcessingJobSummary] = []
8182

8283

8384
class BaseJobRequest(BaseModel):

tests/conftest.py

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,19 @@
1+
from datetime import datetime
12
from unittest.mock import MagicMock
23

34
import pytest
45
from fastapi.testclient import TestClient
56
from sqlalchemy.orm import Session
67

78
from app.main import app
9+
from app.schemas import (
10+
BaseJobRequest,
11+
ProcessTypeEnum,
12+
ProcessingJob,
13+
ProcessingJobSummary,
14+
ProcessingStatusEnum,
15+
ServiceDetails,
16+
)
817

918

1019
@pytest.fixture
@@ -18,6 +27,38 @@ def fake_db_session():
1827
return MagicMock(spec=Session)
1928

2029

30+
@pytest.fixture
31+
def fake_processing_job_request():
32+
return BaseJobRequest(
33+
title="Test Job",
34+
label=ProcessTypeEnum.OPENEO,
35+
service=ServiceDetails(service="foo", application="bar"),
36+
parameters={},
37+
)
38+
39+
40+
@pytest.fixture
41+
def fake_processing_job_summary():
42+
return ProcessingJobSummary(
43+
id=1,
44+
title="Test Job",
45+
label=ProcessTypeEnum.OPENEO,
46+
status=ProcessingStatusEnum.CREATED,
47+
)
48+
49+
50+
@pytest.fixture
51+
def fake_processing_job(fake_processing_job_summary, fake_processing_job_request):
52+
return ProcessingJob(
53+
**(fake_processing_job_summary.model_dump()),
54+
service=fake_processing_job_request.service,
55+
parameters=fake_processing_job_request.parameters,
56+
result_link="https://foo.bar",
57+
created=datetime.now(),
58+
updated=datetime.now()
59+
)
60+
61+
2162
# @pytest.fixture(autouse=True)
2263
# def disable_auth(monkeypatch):
2364
# # Replace auth.get_current_user dependency with a stub for tests

tests/routers/test_job_status.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
import json
2+
from unittest.mock import patch
3+
4+
5+
from app.schemas import (
6+
JobsStatusResponse,
7+
)
8+
9+
10+
@patch("app.routers.jobs_status.get_processing_jobs_by_user_id")
11+
def test_unit_jobs_get_200(
12+
mock_get_processing_jobs, client, fake_processing_job_summary
13+
):
14+
15+
mock_get_processing_jobs.return_value = [fake_processing_job_summary]
16+
17+
r = client.get("/jobs_status")
18+
assert r.status_code == 200
19+
assert json.dumps(r.json(), indent=1) == JobsStatusResponse(
20+
upscaling_tasks=[], processing_jobs=[fake_processing_job_summary]
21+
).model_dump_json(indent=1)

tests/routers/test_unit_jobs.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
import json
2+
from unittest.mock import patch
3+
4+
5+
@patch("app.routers.unit_jobs.create_processing_job")
6+
def test_unit_jobs_create_201(
7+
mock_create_processing_job,
8+
client,
9+
fake_processing_job_request,
10+
fake_processing_job_summary,
11+
):
12+
13+
mock_create_processing_job.return_value = fake_processing_job_summary
14+
15+
r = client.post("/unit_jobs", json=fake_processing_job_request.model_dump())
16+
assert r.status_code == 201
17+
assert r.json() == fake_processing_job_summary.model_dump()
18+
19+
20+
@patch("app.routers.unit_jobs.create_processing_job")
21+
def test_unit_jobs_create_500(
22+
mock_create_processing_job,
23+
client,
24+
fake_processing_job_request,
25+
):
26+
27+
mock_create_processing_job.side_effect = SystemError("Could not launch the job")
28+
29+
r = client.post("/unit_jobs", json=fake_processing_job_request.model_dump())
30+
assert r.status_code == 500
31+
assert "could not launch the job" in r.json().get("detail", "").lower()
32+
33+
34+
@patch("app.routers.unit_jobs.get_processing_job_by_user_id")
35+
def test_unit_jobs_get_job_200(
36+
mock_get_processing_job,
37+
client,
38+
fake_processing_job,
39+
):
40+
41+
mock_get_processing_job.return_value = fake_processing_job
42+
43+
r = client.get("/unit_jobs/1")
44+
assert r.status_code == 200
45+
assert json.dumps(r.json(), indent=1) == fake_processing_job.model_dump_json(
46+
indent=1
47+
)
48+
49+
50+
@patch("app.routers.unit_jobs.get_processing_job_by_user_id")
51+
def test_unit_jobs_get_job_404(
52+
mock_get_processing_job,
53+
client,
54+
fake_processing_job,
55+
):
56+
57+
mock_get_processing_job.return_value = None
58+
59+
r = client.get("/unit_jobs/1")
60+
assert r.status_code == 404
61+
assert "processing job 1 not found" in r.json().get("detail", "").lower()

0 commit comments

Comments (0)