Merged

33 commits
55ee32e
add initial rpc endpoints for celery
bisgaard-itis Jul 2, 2025
0db7aa2
follow-up
bisgaard-itis Jul 2, 2025
b26466d
cleanups
bisgaard-itis Jul 2, 2025
10fbffd
start adding unit tests
bisgaard-itis Jul 2, 2025
8d3b509
first test
bisgaard-itis Jul 2, 2025
b397f97
ensure operations match google api guideline
bisgaard-itis Jul 2, 2025
fa701a9
fix unit test of get status endpoint
bisgaard-itis Jul 2, 2025
ec6e408
cleanup
bisgaard-itis Jul 2, 2025
5cd3085
add unit tests for additional endpoints
bisgaard-itis Jul 2, 2025
f93631d
initial implementation of rpc client to convert exceptions
bisgaard-itis Jul 2, 2025
b18792d
further improvements
bisgaard-itis Jul 2, 2025
bc7ce89
clean up error mapping
bisgaard-itis Jul 3, 2025
9542bc2
cover exceptions in tests
bisgaard-itis Jul 3, 2025
2ae921b
add status codes to openapi specs
bisgaard-itis Jul 3, 2025
56f01ff
update openapi specs
bisgaard-itis Jul 3, 2025
97345ab
@giancarloromeo absolute import -> relative import
bisgaard-itis Jul 3, 2025
c539146
Merge branch 'master' into 114-expose-function-job-logs-via-api-server
bisgaard-itis Jul 3, 2025
c5809c2
pylint
bisgaard-itis Jul 3, 2025
3c0d2a3
fix indirect import
bisgaard-itis Jul 3, 2025
cdd7f2f
pylint
bisgaard-itis Jul 3, 2025
9e947a4
Merge branch 'master' into 114-expose-function-job-logs-via-api-server
bisgaard-itis Jul 4, 2025
c607e35
@pcrespov add support_id to error model
bisgaard-itis Jul 4, 2025
21d8b1a
add support_id to error model in api-server
bisgaard-itis Jul 4, 2025
5c8c1cd
services/api-server version: 0.9.0 → 0.9.1
bisgaard-itis Jul 4, 2025
77d3283
update openapi specs
bisgaard-itis Jul 4, 2025
5222258
hide task endpoints from api
bisgaard-itis Jul 4, 2025
aaac3a2
update openapi specs
bisgaard-itis Jul 4, 2025
fa695f2
fix import
bisgaard-itis Jul 4, 2025
8d9ba17
fix typecheck
bisgaard-itis Jul 4, 2025
8ffe3fe
Merge branch 'master' into 114-expose-function-job-logs-via-api-server
bisgaard-itis Jul 4, 2025
7d11165
@pcrespov remove default values
bisgaard-itis Jul 4, 2025
088ad73
use BaseBackendError as type hint
bisgaard-itis Jul 4, 2025
fc20bab
fix typecheck
bisgaard-itis Jul 7, 2025
@@ -6,7 +6,7 @@ class BaseAsyncjobRpcError(OsparcErrorMixin, RuntimeError):


class JobSchedulerError(BaseAsyncjobRpcError):
msg_template: str = "Celery exception: {exc}"
msg_template: str = "Async job scheduler exception: {exc}"


class JobMissingError(BaseAsyncjobRpcError):
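Note: a minimal sketch of how the renamed message would surface, assuming OsparcErrorMixin interpolates msg_template from the keyword arguments passed to the exception (the value "broker connection lost" is made up):

# sketch only -- assumes OsparcErrorMixin formats msg_template from kwargs
try:
    raise JobSchedulerError(exc="broker connection lost")
except JobSchedulerError as err:
    assert f"{err}" == "Async job scheduler exception: broker connection lost"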
@@ -0,0 +1,88 @@
# pylint: disable=unused-argument

from dataclasses import dataclass

from models_library.api_schemas_rpc_async_jobs.async_jobs import (
AsyncJobGet,
AsyncJobId,
AsyncJobNameData,
AsyncJobResult,
AsyncJobStatus,
)
from models_library.api_schemas_rpc_async_jobs.exceptions import BaseAsyncjobRpcError
from models_library.progress_bar import ProgressReport
from models_library.rabbitmq_basic_types import RPCNamespace
from pydantic import validate_call
from pytest_mock import MockType
from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient


@dataclass
class AsyncJobSideEffects:
exception: BaseAsyncjobRpcError | None = None

@validate_call(config={"arbitrary_types_allowed": True})
async def cancel(
self,
rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
*,
rpc_namespace: RPCNamespace,
job_id: AsyncJobId,
job_id_data: AsyncJobNameData,
) -> None:
if self.exception is not None:
raise self.exception
return None

@validate_call(config={"arbitrary_types_allowed": True})
async def status(
self,
rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
*,
rpc_namespace: RPCNamespace,
job_id: AsyncJobId,
job_id_data: AsyncJobNameData,
) -> AsyncJobStatus:
if self.exception is not None:
raise self.exception

return AsyncJobStatus(
job_id=job_id,
progress=ProgressReport(
actual_value=50.0,
total=100.0,
attempt=1,
),
done=False,
)

@validate_call(config={"arbitrary_types_allowed": True})
async def result(
self,
rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
*,
rpc_namespace: RPCNamespace,
job_id: AsyncJobId,
job_id_data: AsyncJobNameData,
) -> AsyncJobResult:
if self.exception is not None:
raise self.exception
return AsyncJobResult(result="Success")

@validate_call(config={"arbitrary_types_allowed": True})
async def list_jobs(
self,
rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
*,
rpc_namespace: RPCNamespace,
job_id_data: AsyncJobNameData,
filter_: str = "",
) -> list[AsyncJobGet]:
if self.exception is not None:
raise self.exception
return [
AsyncJobGet(
job_id=AsyncJobId("123e4567-e89b-12d3-a456-426614174000"),
job_name="Example Job",
)
]
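Note: a rough sketch of how this side-effect helper could be wired into a unit test; the patch target below is hypothetical and would have to match the module from which the api-server actually imports the async-jobs RPC functions:

# pytest sketch -- the patch target is a placeholder, not taken from this PR
import pytest
from models_library.api_schemas_rpc_async_jobs.exceptions import JobSchedulerError
from pytest_mock import MockerFixture


@pytest.fixture
def mocked_async_jobs_rpc(mocker: MockerFixture) -> AsyncJobSideEffects:
    side_effects = AsyncJobSideEffects(
        exception=JobSchedulerError(exc="simulated failure")
    )
    for method_name in ("cancel", "status", "result", "list_jobs"):
        mocker.patch(
            f"simcore_service_api_server.services_rpc.async_jobs.{method_name}",  # hypothetical path
            side_effect=getattr(side_effects, method_name),
        )
    return side_effects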
2 changes: 1 addition & 1 deletion services/api-server/VERSION
@@ -1 +1 @@
0.9.0
0.9.1
14 changes: 13 additions & 1 deletion services/api-server/openapi.json
@@ -3,7 +3,7 @@
"info": {
"title": "osparc.io public API",
"description": "osparc-simcore public API specifications",
"version": "0.9.0"
"version": "0.9.1"
},
"paths": {
"/v0/meta": {
@@ -7977,6 +7977,18 @@
"items": {},
"type": "array",
"title": "Errors"
},
"support_id": {
"anyOf": [
{
"type": "string",
"pattern": "OEC:([a-fA-F0-9]{12})-(\\d{13,14})"
},
{
"type": "null"
}
],
"title": "Support Id"
}
},
"type": "object",
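Note: for illustration only, the kind of payload the new support_id field is meant to carry; the identifier below is invented, only the OEC pattern comes from the schema above:

import re

error_payload = {
    "errors": ["Async job scheduler exception: broker connection lost"],
    "support_id": "OEC:0123456789ab-1751600000000",  # made-up example value
}
assert re.fullmatch(r"OEC:([a-fA-F0-9]{12})-(\d{13,14})", error_payload["support_id"])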
2 changes: 1 addition & 1 deletion services/api-server/setup.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.9.0
current_version = 0.9.1
commit = True
message = services/api-server version: {current_version} → {new_version}
tag = False
@@ -0,0 +1,13 @@
from typing import Annotated

from fastapi import Depends
from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient

from ...services_rpc.async_jobs import AsyncJobClient
from .rabbitmq import get_rabbitmq_rpc_client


def get_async_jobs_client(
rabbitmq_rpc_client: Annotated[RabbitMQRPCClient, Depends(get_rabbitmq_rpc_client)],
) -> AsyncJobClient:
return AsyncJobClient(_rabbitmq_rpc_client=rabbitmq_rpc_client)
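Note: in unit tests this dependency can be swapped out via FastAPI's standard override mechanism; a sketch under the assumption that a test double for AsyncJobClient is available:

from fastapi import FastAPI

def install_fake_async_jobs_client(app: FastAPI, fake_client: AsyncJobClient) -> None:
    # replace the RabbitMQ-backed client with a test double
    app.dependency_overrides[get_async_jobs_client] = lambda: fake_client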
@@ -18,6 +18,7 @@
solvers_jobs_read,
studies,
studies_jobs,
tasks,
users,
wallets,
)
@@ -65,6 +66,7 @@ def create_router(settings: ApplicationSettings):
router.include_router(
functions_routes.function_router, tags=["functions"], prefix=_FUNCTIONS_PREFIX
)
router.include_router(tasks.router, tags=["tasks"], prefix="/tasks")

# NOTE: multiple-files upload is currently disabled
# Web form to upload files at http://localhost:8000/v0/upload-form-view
@@ -0,0 +1,180 @@
import logging
from typing import Annotated, Any

from fastapi import APIRouter, Depends, FastAPI, status
from models_library.api_schemas_long_running_tasks.base import TaskProgress
from models_library.api_schemas_long_running_tasks.tasks import (
TaskGet,
TaskResult,
TaskStatus,
)
from models_library.api_schemas_rpc_async_jobs.async_jobs import (
AsyncJobId,
AsyncJobNameData,
)
from models_library.products import ProductName
from models_library.users import UserID
from servicelib.fastapi.dependencies import get_app

from ...models.schemas.base import ApiServerEnvelope
from ...models.schemas.errors import ErrorGet
from ...services_rpc.async_jobs import AsyncJobClient
from ..dependencies.authentication import get_current_user_id, get_product_name
from ..dependencies.tasks import get_async_jobs_client
from ._constants import (
FMSG_CHANGELOG_NEW_IN_VERSION,
create_route_description,
)

router = APIRouter()
_logger = logging.getLogger(__name__)


def _get_job_id_data(user_id: UserID, product_name: ProductName) -> AsyncJobNameData:
return AsyncJobNameData(user_id=user_id, product_name=product_name)


_DEFAULT_TASK_STATUS_CODES: dict[int | str, dict[str, Any]] = {
status.HTTP_500_INTERNAL_SERVER_ERROR: {
"description": "Internal server error",
"model": ErrorGet,
},
}


@router.get(
"",
response_model=ApiServerEnvelope[list[TaskGet]],
responses=_DEFAULT_TASK_STATUS_CODES,
status_code=status.HTTP_200_OK,
name="list_tasks",
description=create_route_description(
base="List all tasks",
changelog=[
FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"),
],
),
include_in_schema=False, # TO BE RELEASED in 0.10-rc1
)
async def list_tasks(
app: Annotated[FastAPI, Depends(get_app)],
user_id: Annotated[UserID, Depends(get_current_user_id)],
product_name: Annotated[ProductName, Depends(get_product_name)],
async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)],
):
user_async_jobs = await async_jobs.list_jobs(
job_id_data=_get_job_id_data(user_id, product_name),
filter_="",
)
app_router = app.router
data = [
TaskGet(
task_id=f"{job.job_id}",
task_name=job.job_name,
status_href=app_router.url_path_for(
"get_task_status", task_id=f"{job.job_id}"
),
abort_href=app_router.url_path_for("cancel_task", task_id=f"{job.job_id}"),
result_href=app_router.url_path_for(
"get_task_result", task_id=f"{job.job_id}"
),
)
for job in user_async_jobs
]
return ApiServerEnvelope(data=data)


@router.get(
"/{task_id}",
response_model=TaskStatus,
name="get_task_status",
responses=_DEFAULT_TASK_STATUS_CODES,
status_code=status.HTTP_200_OK,
description=create_route_description(
base="Get task status",
changelog=[
FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"),
],
),
include_in_schema=False, # TO BE RELEASED in 0.10-rc1
)
async def get_task_status(
task_id: AsyncJobId,
user_id: Annotated[UserID, Depends(get_current_user_id)],
product_name: Annotated[ProductName, Depends(get_product_name)],
async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)],
):
async_job_rpc_status = await async_jobs.status(
job_id=task_id,
job_id_data=_get_job_id_data(user_id, product_name),
)
_task_id = f"{async_job_rpc_status.job_id}"
return TaskStatus(
task_progress=TaskProgress(
task_id=_task_id, percent=async_job_rpc_status.progress.percent_value
),
done=async_job_rpc_status.done,
started=None,
)


@router.post(
"/{task_id}:cancel",
status_code=status.HTTP_204_NO_CONTENT,
name="cancel_task",
responses=_DEFAULT_TASK_STATUS_CODES,
description=create_route_description(
base="Cancel task",
changelog=[
FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"),
],
),
include_in_schema=False, # TO BE RELEASED in 0.10-rc1
)
async def cancel_task(
task_id: AsyncJobId,
user_id: Annotated[UserID, Depends(get_current_user_id)],
product_name: Annotated[ProductName, Depends(get_product_name)],
async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)],
):
await async_jobs.cancel(
job_id=task_id,
job_id_data=_get_job_id_data(user_id, product_name),
)


@router.get(
"/{task_id}/result",
response_model=TaskResult,
name="get_task_result",
responses={
status.HTTP_404_NOT_FOUND: {
"description": "Task result not found",
"model": ErrorGet,
},
status.HTTP_409_CONFLICT: {
"description": "Task is cancelled",
"model": ErrorGet,
},
**_DEFAULT_TASK_STATUS_CODES,
},
status_code=status.HTTP_200_OK,
description=create_route_description(
base="Get task result",
changelog=[
FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"),
],
),
include_in_schema=False, # TO BE RELEASED in 0.10-rc1
)
async def get_task_result(
task_id: AsyncJobId,
user_id: Annotated[UserID, Depends(get_current_user_id)],
product_name: Annotated[ProductName, Depends(get_product_name)],
async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)],
):
async_job_rpc_result = await async_jobs.result(
job_id=task_id,
job_id_data=_get_job_id_data(user_id, product_name),
)
return TaskResult(result=async_job_rpc_result.result, error=None)
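Note: a usage sketch of the new (still hidden) endpoints from a caller's perspective, assuming the usual /v0 prefix and an already-authenticated httpx client; the helper below is illustrative only:

import httpx

async def fetch_result_when_done(client: httpx.AsyncClient, task_id: str) -> dict | None:
    # GET /v0/tasks/{task_id} maps to get_task_status above
    status_response = await client.get(f"/v0/tasks/{task_id}")
    status_response.raise_for_status()
    if not status_response.json()["done"]:
        return None
    # GET /v0/tasks/{task_id}/result maps to get_task_result above;
    # POST /v0/tasks/{task_id}:cancel follows the custom-method style from the commit history
    result_response = await client.get(f"/v0/tasks/{task_id}/result")
    result_response.raise_for_status()
    return result_response.json()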
@@ -1,12 +1,32 @@
import logging

from common_library.error_codes import create_error_code
from servicelib.logging_errors import create_troubleshootting_log_kwargs
from servicelib.status_codes_utils import is_5xx_server_error
from starlette.requests import Request
from starlette.responses import JSONResponse

from ...exceptions.backend_errors import BaseBackEndError
from ._utils import create_error_json_response

_logger = logging.getLogger(__name__)


async def backend_error_handler(request: Request, exc: Exception) -> JSONResponse:
assert request # nosec
assert isinstance(exc, BaseBackEndError)

return create_error_json_response(f"{exc}", status_code=exc.status_code)
user_error_msg = f"{exc}"
support_id = None
if is_5xx_server_error(exc.status_code):
support_id = create_error_code(exc)
_logger.exception(
**create_troubleshootting_log_kwargs(
user_error_msg,
error=exc,
error_code=support_id,
tip="Unexpected error",
)
)
return create_error_json_response(
user_error_msg, status_code=exc.status_code, support_id=support_id
)
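Note: the net effect is that only 5xx responses carry a support_id that can be correlated with the server logs; an illustrative test sketch, assuming an httpx test client against the app and a mocked backend that raises a BaseBackEndError mapped to a 5xx status:

import re

import httpx

async def test_5xx_response_carries_support_id(client: httpx.AsyncClient) -> None:
    # the mocked backend is assumed to fail with a 5xx-mapped error here
    response = await client.get("/v0/tasks")
    assert response.status_code >= 500
    support_id = response.json()["support_id"]
    assert support_id is not None
    assert re.fullmatch(r"OEC:([a-fA-F0-9]{12})-(\d{13,14})", support_id)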