diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/__init__.py b/packages/models-library/src/models_library/api_schemas_datcore_adapter/__init__.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/__init__.py rename to packages/models-library/src/models_library/api_schemas_datcore_adapter/__init__.py diff --git a/packages/models-library/src/models_library/api_schemas_datcore_adapter/datasets.py b/packages/models-library/src/models_library/api_schemas_datcore_adapter/datasets.py new file mode 100644 index 000000000000..8011f3f9ea40 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_datcore_adapter/datasets.py @@ -0,0 +1,41 @@ +from datetime import datetime +from enum import Enum, unique +from pathlib import Path + +from pydantic import BaseModel, ByteSize + + +class DatasetMetaData(BaseModel): + id: str + display_name: str + + +@unique +class DataType(str, Enum): + FILE = "FILE" + FOLDER = "FOLDER" + + +class PackageMetaData(BaseModel): + path: Path + display_path: Path + package_id: str + name: str + filename: str + s3_bucket: str + size: ByteSize + created_at: datetime + updated_at: datetime + + +class FileMetaData(BaseModel): + dataset_id: str + package_id: str + id: str + name: str + type: str + path: Path + size: int + created_at: datetime + last_modified_at: datetime + data_type: DataType diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py index 40f174f5f07b..4443241192ee 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_assert_checks.py @@ -32,7 +32,7 @@ def assert_status( response.status_code == expected_status_code ), f"received {response.status_code}: {response.text}, expected {get_code_display_name(expected_status_code)}" - # reponse + 
# response if expected_status_code == status.HTTP_204_NO_CONTENT: assert response.text == "" return None, None diff --git a/packages/service-library/src/servicelib/fastapi/http_error.py b/packages/service-library/src/servicelib/fastapi/http_error.py index c35c615969c8..8640fbf2dbb1 100644 --- a/packages/service-library/src/servicelib/fastapi/http_error.py +++ b/packages/service-library/src/servicelib/fastapi/http_error.py @@ -99,3 +99,17 @@ def set_app_default_http_error_handlers(app: FastAPI) -> None: envelope_error=True, ), ) + + # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy + app.add_exception_handler( + NotImplementedError, + make_http_error_handler_for_exception( + status.HTTP_501_NOT_IMPLEMENTED, NotImplementedError, envelope_error=True + ), + ) + app.add_exception_handler( + Exception, + make_http_error_handler_for_exception( + status.HTTP_500_INTERNAL_SERVER_ERROR, Exception, envelope_error=True + ), + ) diff --git a/services/datcore-adapter/requirements/_test.in b/services/datcore-adapter/requirements/_test.in index f7b499f29661..be1471675726 100644 --- a/services/datcore-adapter/requirements/_test.in +++ b/services/datcore-adapter/requirements/_test.in @@ -6,6 +6,8 @@ asgi_lifespan +botocore-stubs +boto3-stubs coverage faker pytest @@ -19,5 +21,3 @@ pytest-sugar pytest-xdist requests respx -types-boto3 -types-botocore diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt index bf40c4a5f468..aecfc73b38a1 100644 --- a/services/datcore-adapter/requirements/_test.txt +++ b/services/datcore-adapter/requirements/_test.txt @@ -4,10 +4,12 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in +boto3-stubs==1.37.0 + # via -r requirements/_test.in botocore-stubs==1.36.6 # via - # types-boto3 - # types-botocore + # -r requirements/_test.in + # boto3-stubs certifi==2024.2.2 # via # -c requirements/../../../requirements/constraints.txt @@ -110,17 +112,13 @@ 
termcolor==2.5.0 # via pytest-sugar types-awscrt==0.23.7 # via botocore-stubs -types-boto3==1.36.6 - # via -r requirements/_test.in -types-botocore==1.0.2 - # via -r requirements/_test.in types-s3transfer==0.11.2 - # via types-boto3 + # via boto3-stubs typing-extensions==4.12.2 # via # -c requirements/_base.txt + # boto3-stubs # faker - # types-boto3 urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py index 473879a2ac87..337738ecf46b 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/application.py @@ -6,6 +6,6 @@ assert get_app # nosec __all__: tuple[str, ...] = ( - "get_reverse_url_mapper", "get_app", + "get_reverse_url_mapper", ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py index 026a7d1c6cc1..c7bc55f15613 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/dependencies/pennsieve.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import Annotated, cast from fastapi import Depends, FastAPI from fastapi.requests import Request @@ -11,7 +11,7 @@ def _get_app(request: Request) -> FastAPI: def get_pennsieve_api_client( - app: FastAPI = Depends(_get_app), + app: Annotated[FastAPI, Depends(_get_app)], ) -> PennsieveApiClient: client = PennsieveApiClient.get_instance(app) assert client # nosec diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py 
b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py deleted file mode 100644 index bcf8cdec9c65..000000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Callable - -from fastapi import HTTPException -from fastapi.encoders import jsonable_encoder -from starlette.requests import Request -from starlette.responses import JSONResponse - - -async def http_error_handler(_: Request, exc: Exception) -> JSONResponse: - assert isinstance(exc, HTTPException) # nosec - return JSONResponse( - content=jsonable_encoder({"errors": [exc.detail]}), status_code=exc.status_code - ) - - -def make_http_error_handler_for_exception( - status_code: int, - exception_cls: type[BaseException], - *, - override_detail_message: str | None = None, -) -> Callable: - """ - Produces a handler for BaseException-type exceptions which converts them - into an error JSON response with a given status code - - SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions - """ - - async def _http_error_handler(_: Request, exc: type[BaseException]) -> JSONResponse: - assert isinstance(exc, exception_cls) # nosec - details = override_detail_message or f"{exc}" - return JSONResponse( - content=jsonable_encoder({"errors": [details]}), status_code=status_code - ) - - return _http_error_handler diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py deleted file mode 100644 index 3770d62cb23d..000000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py +++ /dev/null @@ -1,28 +0,0 @@ -from fastapi.encoders import jsonable_encoder -from fastapi.exceptions import RequestValidationError -from fastapi.openapi.constants import REF_PREFIX -from fastapi.openapi.utils import 
validation_error_response_definition -from pydantic import ValidationError -from starlette.requests import Request -from starlette.responses import JSONResponse -from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY - - -async def http422_error_handler( - _: Request, - exc: Exception, -) -> JSONResponse: - assert isinstance(exc, RequestValidationError | ValidationError) # nosec - return JSONResponse( - content=jsonable_encoder({"errors": exc.errors()}), - status_code=HTTP_422_UNPROCESSABLE_ENTITY, - ) - - -validation_error_response_definition["properties"] = { - "errors": { - "title": "Validation errors", - "type": "array", - "items": {"$ref": f"{REF_PREFIX}ValidationError"}, - }, -} diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py deleted file mode 100644 index d1c1d8e84101..000000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/module_setup.py +++ /dev/null @@ -1,31 +0,0 @@ -""" - api app module -""" -from botocore.exceptions import ClientError -from fastapi import APIRouter, FastAPI -from fastapi.exceptions import HTTPException, RequestValidationError -from servicelib.fastapi.timing_middleware import add_process_time_header - -from .._meta import API_VTAG -from .errors.http_error import http_error_handler -from .errors.pennsieve_error import botocore_exceptions_handler -from .errors.validation_error import http422_error_handler -from .routes import datasets, files, health, user - - -def setup_api(app: FastAPI): - router = APIRouter() - - app.include_router(router, prefix=f"/{API_VTAG}") - app.include_router(health.router, tags=["healthcheck"], prefix=f"/{API_VTAG}") - app.include_router(user.router, tags=["user"], prefix=f"/{API_VTAG}") - app.include_router(datasets.router, tags=["datasets"], prefix=f"/{API_VTAG}") - app.include_router(files.router, tags=["files"], prefix=f"/{API_VTAG}") - - # exception 
handlers - app.add_exception_handler(HTTPException, http_error_handler) - app.add_exception_handler(RequestValidationError, http422_error_handler) - app.add_exception_handler(ClientError, botocore_exceptions_handler) - - # middlewares - app.middleware("http")(add_process_time_header) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/__init__.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/__init__.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/__init__.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/__init__.py diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/datasets.py similarity index 93% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/datasets.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/datasets.py index 7c36b01c3fce..bdfb37cf7859 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/datasets.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/datasets.py @@ -6,10 +6,13 @@ from fastapi_pagination import Page, Params from fastapi_pagination.api import create_page, resolve_params from fastapi_pagination.bases import RawParams +from models_library.api_schemas_datcore_adapter.datasets import ( + DatasetMetaData, + FileMetaData, +) from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status -from ...models.domains.datasets import DatasetsOut, FileMetaDataOut from ...modules.pennsieve import PennsieveApiClient from ..dependencies.pennsieve import get_pennsieve_api_client @@ -26,7 +29,7 @@ "/datasets", summary="list datasets", status_code=status.HTTP_200_OK, - response_model=Page[DatasetsOut], + 
response_model=Page[DatasetMetaData], ) @cancel_on_disconnect @cached( @@ -39,7 +42,7 @@ async def list_datasets( x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], params: Annotated[Params, Depends()], -) -> Page[DatasetsOut]: +) -> Page[DatasetMetaData]: assert request # nosec raw_params: RawParams = resolve_params(params).to_raw_params() assert raw_params.limit is not None # nosec @@ -57,7 +60,7 @@ async def list_datasets( "/datasets/{dataset_id}/files", summary="list top level files/folders in a dataset", status_code=status.HTTP_200_OK, - response_model=Page[FileMetaDataOut], + response_model=Page[FileMetaData], ) @cancel_on_disconnect @cached( @@ -71,7 +74,7 @@ async def list_dataset_top_level_files( x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], params: Annotated[Params, Depends()], -) -> Page[FileMetaDataOut]: +) -> Page[FileMetaData]: assert request # nosec raw_params: RawParams = resolve_params(params).to_raw_params() @@ -91,7 +94,7 @@ async def list_dataset_top_level_files( "/datasets/{dataset_id}/files/{collection_id}", summary="list top level files/folders in a collection in a dataset", status_code=status.HTTP_200_OK, - response_model=Page[FileMetaDataOut], + response_model=Page[FileMetaData], ) @cancel_on_disconnect @cached( @@ -106,7 +109,7 @@ async def list_dataset_collection_files( x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], params: Annotated[Params, Depends()], -) -> Page[FileMetaDataOut]: +) -> Page[FileMetaData]: assert request # nosec raw_params: RawParams = resolve_params(params).to_raw_params() assert raw_params.limit is not None # nosec @@ -126,7 +129,7 @@ async def 
list_dataset_collection_files( "/datasets/{dataset_id}/files_legacy", summary="list all file meta data in dataset", status_code=status.HTTP_200_OK, - response_model=list[FileMetaDataOut], + response_model=list[FileMetaData], ) @cancel_on_disconnect @cached( @@ -139,7 +142,7 @@ async def list_dataset_files_legacy( x_datcore_api_key: Annotated[str, Header(..., description="Datcore API Key")], x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], -) -> list[FileMetaDataOut]: +) -> list[FileMetaData]: assert request # nosec return await pennsieve_client.list_all_dataset_files( api_key=x_datcore_api_key, diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/files.py similarity index 86% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/files.py index 2234c17d3dc1..c69cb6d0e0ca 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/files.py @@ -1,17 +1,18 @@ import logging -from typing import Annotated, Any +from typing import Annotated from fastapi import APIRouter, Depends, Header, Request +from models_library.api_schemas_datcore_adapter.datasets import PackageMetaData from pydantic import AnyUrl, TypeAdapter from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status -from ...models.domains.files import FileDownloadOut +from ...models.files import FileDownloadOut from ...modules.pennsieve import PennsieveApiClient from ..dependencies.pennsieve import get_pennsieve_api_client router = APIRouter() -log = logging.getLogger(__file__) +_logger = logging.getLogger(__file__) 
@router.get( @@ -62,7 +63,7 @@ async def delete_file( "/packages/{package_id}/files", summary="returns a package (i.e. a file)", status_code=status.HTTP_200_OK, - response_model=list[dict[str, Any]], + response_model=list[PackageMetaData], ) @cancel_on_disconnect async def get_package( @@ -71,12 +72,15 @@ async def get_package( x_datcore_api_key: Annotated[str, Header(..., description="Datcore API Key")], x_datcore_api_secret: Annotated[str, Header(..., description="Datcore API Secret")], pennsieve_client: Annotated[PennsieveApiClient, Depends(get_pennsieve_api_client)], -) -> list[dict[str, Any]]: +) -> list[PackageMetaData]: assert request # nosec - return await pennsieve_client.get_package_files( + + data = await pennsieve_client.get_package_files( api_key=x_datcore_api_key, api_secret=x_datcore_api_secret, package_id=package_id, limit=1, offset=0, + fill_path=True, ) + return [_.to_api_model() for _ in data] diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/health.py similarity index 92% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/health.py index 9db40d3acf95..120767f3d114 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/health.py @@ -1,6 +1,6 @@ import logging from collections.abc import Callable -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import Annotated from fastapi import APIRouter, Depends @@ -24,7 +24,7 @@ status_code=status.HTTP_200_OK, ) async def get_service_alive(): - return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" + return f"{__name__}@{datetime.now(UTC).isoformat()}" @router.get("/ready", status_code=status.HTTP_200_OK, 
response_model=AppStatusCheck) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/user.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/user.py similarity index 95% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/user.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/user.py index 82f004042fde..dea213f5ec76 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/user.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/rest/user.py @@ -5,7 +5,7 @@ from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status -from ...models.domains.user import Profile +from ...models.user import Profile from ...modules.pennsieve import PennsieveApiClient from ..dependencies.pennsieve import get_pennsieve_api_client diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes.py new file mode 100644 index 000000000000..d316434bc988 --- /dev/null +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes.py @@ -0,0 +1,18 @@ +""" +api app module +""" + +from fastapi import APIRouter, FastAPI + +from .._meta import API_VTAG +from .rest import datasets, files, health, user + + +def setup_rest_api_routes(app: FastAPI) -> None: + router = APIRouter() + + app.include_router(router, prefix=f"/{API_VTAG}") + app.include_router(health.router, tags=["healthcheck"], prefix=f"/{API_VTAG}") + app.include_router(user.router, tags=["user"], prefix=f"/{API_VTAG}") + app.include_router(datasets.router, tags=["datasets"], prefix=f"/{API_VTAG}") + app.include_router(files.router, tags=["files"], prefix=f"/{API_VTAG}") diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py 
index b3b704b110fe..60839168e97b 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py @@ -1,9 +1,9 @@ import logging import typer -from settings_library.utils_cli import create_settings_command +from settings_library.utils_cli import create_settings_command, create_version_callback -from ._meta import PROJECT_NAME +from ._meta import PROJECT_NAME, __version__ from .core.settings import ApplicationSettings log = logging.getLogger(__name__) @@ -12,13 +12,14 @@ main = typer.Typer(name=PROJECT_NAME) main.command()(create_settings_command(settings_cls=ApplicationSettings, logger=log)) +main.callback()(create_version_callback(__version__)) @main.command() -def run(): +def run() -> None: """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - "$ uvicorn simcore_service_datcore_adapter.main:the_app", + f"$ uvicorn {PROJECT_NAME}.main:the_app", fg=typer.colors.BLUE, ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py index 9b037edacbdc..5ecac86b8825 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py @@ -1,18 +1,20 @@ import logging -from fastapi import FastAPI, HTTPException -from fastapi.exceptions import RequestValidationError +from common_library.basic_types import BootModeEnum +from fastapi import FastAPI +from fastapi.middleware.gzip import GZipMiddleware +from fastapi_pagination import add_pagination +from servicelib.fastapi import timing_middleware +from servicelib.fastapi.http_error import set_app_default_http_error_handlers from servicelib.fastapi.openapi import override_fastapi_openapi_method from servicelib.fastapi.prometheus_instrumentation import ( 
setup_prometheus_instrumentation, ) from servicelib.fastapi.tracing import initialize_tracing -from servicelib.logging_utils import config_all_loggers +from starlette.middleware.base import BaseHTTPMiddleware from .._meta import API_VERSION, API_VTAG, APP_NAME -from ..api.errors.http_error import http_error_handler -from ..api.errors.validation_error import http422_error_handler -from ..api.module_setup import setup_api +from ..api.routes import setup_rest_api_routes from ..modules import pennsieve from .events import ( create_start_app_handler, @@ -29,22 +31,10 @@ "hpack", ) -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -def create_app(settings: ApplicationSettings | None = None) -> FastAPI: - if settings is None: - settings = ApplicationSettings.create_from_envs() - assert settings # nosec - - logging.basicConfig(level=settings.LOG_LEVEL.value) - logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( - log_format_local_dev_enabled=settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, - tracing_settings=settings.DATCORE_ADAPTER_TRACING, - ) - +def create_app(settings: ApplicationSettings) -> FastAPI: # keep mostly quiet noisy loggers quiet_level: int = max( min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING @@ -52,11 +42,13 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: for name in NOISY_LOGGERS: logging.getLogger(name).setLevel(quiet_level) - logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) + + _logger.debug("App settings:\n%s", settings.model_dump_json(indent=1)) app = FastAPI( - debug=settings.debug, - title="Datcore Adapter Service", + debug=settings.SC_BOOT_MODE + in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], + title=APP_NAME, description="Interfaces with Pennsieve storage service", version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", 
@@ -64,6 +56,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: redoc_url=None, # default disabled ) override_fastapi_openapi_method(app) + add_pagination(app) app.state.settings = settings @@ -76,6 +69,13 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: APP_NAME, ) + if settings.SC_BOOT_MODE != BootModeEnum.PRODUCTION: + # middleware to time requests (ONLY for development) + app.add_middleware( + BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header + ) + app.add_middleware(GZipMiddleware) + # events app.add_event_handler("startup", on_startup) app.add_event_handler("startup", create_start_app_handler(app)) @@ -83,12 +83,11 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app.add_event_handler("shutdown", on_shutdown) # Routing - setup_api(app) + setup_rest_api_routes(app) if settings.PENNSIEVE.PENNSIEVE_ENABLED: pennsieve.setup(app, settings.PENNSIEVE) - app.add_exception_handler(HTTPException, http_error_handler) - app.add_exception_handler(RequestValidationError, http422_error_handler) + set_app_default_http_error_handlers(app) return app diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py index c95f13f0ecc0..98f091c76e93 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py @@ -1,8 +1,7 @@ -from functools import cached_property from typing import Annotated from common_library.basic_types import DEFAULT_FACTORY -from models_library.basic_types import BootModeEnum, LogLevel +from models_library.basic_types import LogLevel from pydantic import AliasChoices, Field, TypeAdapter, field_validator from pydantic.networks import AnyUrl from servicelib.logging_utils_filtering import LoggerName, MessageSubstring @@ -69,15 +68,6 @@ class 
ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] - @cached_property - def debug(self) -> bool: - """If True, debug tracebacks should be returned on errors.""" - return self.SC_BOOT_MODE in [ - BootModeEnum.DEBUG, - BootModeEnum.DEVELOPMENT, - BootModeEnum.LOCAL, - ] - @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: str) -> str: diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/__init__.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/errors/__init__.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/__init__.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/errors/__init__.py diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/errors/handlers.py similarity index 62% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/errors/handlers.py index c1101961b34a..90561e459f6e 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/errors/handlers.py @@ -1,5 +1,7 @@ from botocore.exceptions import ClientError +from fastapi import FastAPI from fastapi.encoders import jsonable_encoder +from servicelib.fastapi.http_error import set_app_default_http_error_handlers from starlette.requests import Request from starlette.responses import JSONResponse from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_500_INTERNAL_SERVER_ERROR @@ -12,12 +14,19 @@ async def botocore_exceptions_handler( assert isinstance(exc, ClientError) # nosec assert "Error" in exc.response # nosec assert "Code" in exc.response["Error"] # nosec + 
error_content = {"errors": [f"{exc}"]} if exc.response["Error"]["Code"] == "NotAuthorizedException": return JSONResponse( - content=jsonable_encoder({"errors": exc.response["Error"]}), + content=jsonable_encoder({"error": error_content}), status_code=HTTP_401_UNAUTHORIZED, ) return JSONResponse( - content=jsonable_encoder({"errors": exc.response["Error"]}), + content=jsonable_encoder({"error": error_content}), status_code=HTTP_500_INTERNAL_SERVER_ERROR, ) + + +def set_exception_handlers(app: FastAPI) -> None: + set_app_default_http_error_handlers(app) + + app.add_exception_handler(ClientError, botocore_exceptions_handler) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index d163148a200a..7bd6a7871631 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -1,7 +1,22 @@ -"""Main application to be deployed in for example uvicorn -""" +"""Main application to be deployed in for example uvicorn""" + +import logging + from fastapi import FastAPI +from servicelib.logging_utils import config_all_loggers from simcore_service_datcore_adapter.core.application import create_app +from simcore_service_datcore_adapter.core.settings import ApplicationSettings + +_the_settings = ApplicationSettings.create_from_envs() + +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_the_settings.log_level) # NOSONAR +logging.root.setLevel(_the_settings.log_level) +config_all_loggers( + log_format_local_dev_enabled=_the_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, + tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, +) # SINGLETON FastAPI app -the_app: FastAPI = create_app() +the_app: FastAPI = create_app(_the_settings) diff --git 
a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/datasets.py deleted file mode 100644 index e91d632d30d7..000000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/datasets.py +++ /dev/null @@ -1,6 +0,0 @@ -from ..schemas.datasets import DatasetMetaData, FileMetaData - - -DatasetsOut = DatasetMetaData - -FileMetaDataOut = FileMetaData diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/files.py deleted file mode 100644 index a125faaa5fd3..000000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/files.py +++ /dev/null @@ -1,5 +0,0 @@ -from pydantic import AnyUrl, BaseModel - - -class FileDownloadOut(BaseModel): - link: AnyUrl diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/files.py new file mode 100644 index 000000000000..8275315b42bf --- /dev/null +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/files.py @@ -0,0 +1,36 @@ +import datetime +from pathlib import Path +from typing import Annotated + +from models_library.api_schemas_datcore_adapter.datasets import PackageMetaData +from pydantic import AnyUrl, BaseModel, ByteSize, Field + + +class FileDownloadOut(BaseModel): + link: AnyUrl + + +class DatCorePackageMetaData(BaseModel): + id: int + path: Path + display_path: Path + package_id: Annotated[str, Field(alias="packageId")] + name: str + filename: str + s3_bucket: Annotated[str, Field(alias="s3bucket")] + size: ByteSize + created_at: Annotated[datetime.datetime, Field(alias="createdAt")] + updated_at: Annotated[datetime.datetime, Field(alias="updatedAt")] + + def to_api_model(self) -> PackageMetaData: + return 
PackageMetaData( + path=self.path, + display_path=self.display_path, + package_id=self.package_id, + name=self.name, + filename=self.filename, + s3_bucket=self.s3_bucket, + size=self.size, + created_at=self.created_at, + updated_at=self.updated_at, + ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/__init__.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py deleted file mode 100644 index 5a10a88dfcb1..000000000000 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py +++ /dev/null @@ -1,60 +0,0 @@ -from datetime import datetime -from enum import Enum, unique -from pathlib import Path -from typing import Any - -from pydantic import BaseModel - - -class DatasetMetaData(BaseModel): - id: str - display_name: str - - -@unique -class DataType(str, Enum): - FILE = "FILE" - FOLDER = "FOLDER" - - -class FileMetaData(BaseModel): - dataset_id: str - package_id: str - id: str - name: str - type: str - path: Path - size: int - created_at: datetime - last_modified_at: datetime - data_type: DataType - - @classmethod - def from_pennsieve_package( - cls, package: dict[str, Any], files: list[dict[str, Any]], base_path: Path - ): - """creates a FileMetaData from a pennsieve data structure.""" - pck_name: str = package["content"]["name"] - if "extension" in package and not pck_name.endswith(package["extension"]): - pck_name += ".".join((pck_name, package["extension"])) - - file_size = 0 - if package["content"]["packageType"] != "Collection" and files: - file_size = files[0]["content"]["size"] - - return cls( - dataset_id=package["content"]["datasetNodeId"], - package_id=package["content"]["nodeId"], - 
id=f"{package['content']['id']}", - name=pck_name, - path=base_path / pck_name, - type=package["content"]["packageType"], - size=file_size, - created_at=package["content"]["createdAt"], - last_modified_at=package["content"]["updatedAt"], - data_type=( - DataType.FOLDER - if package["content"]["packageType"] == "Collection" - else DataType.FILE - ), - ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/user.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/user.py similarity index 100% rename from services/datcore-adapter/src/simcore_service_datcore_adapter/models/domains/user.py rename to services/datcore-adapter/src/simcore_service_datcore_adapter/models/user.py diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py index edd8a4f381c6..781d86f4916c 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/modules/pennsieve.py @@ -9,6 +9,11 @@ import boto3 from aiocache import SimpleMemoryCache # type: ignore[import-untyped] from fastapi.applications import FastAPI +from models_library.api_schemas_datcore_adapter.datasets import ( + DatasetMetaData, + DataType, + FileMetaData, +) from servicelib.logging_utils import log_context from servicelib.utils import logged_gather from starlette import status @@ -19,8 +24,8 @@ from tenacity.stop import stop_after_attempt from ..core.settings import PennsieveSettings -from ..models.domains.user import Profile -from ..models.schemas.datasets import DatasetMetaData, FileMetaData +from ..models.files import DatCorePackageMetaData +from ..models.user import Profile from ..utils.client_base import BaseServiceClientApi, setup_client_instance logger = logging.getLogger(__name__) @@ -29,6 +34,36 @@ _GATHER_MAX_CONCURRENCY = 10 +def 
_to_file_meta_data( + package: dict[str, Any], files: list[DatCorePackageMetaData], base_path: Path +) -> FileMetaData: + """creates a FileMetaData from a pennsieve data structure.""" + pck_name: str = package["content"]["name"] + if "extension" in package and not pck_name.endswith(package["extension"]): + pck_name += ".".join((pck_name, package["extension"])) + + file_size = 0 + if package["content"]["packageType"] != "Collection" and files: + file_size = files[0].size + + return FileMetaData( + dataset_id=package["content"]["datasetNodeId"], + package_id=package["content"]["nodeId"], + id=f"{package['content']['id']}", + name=pck_name, + path=base_path / pck_name, + type=package["content"]["packageType"], + size=file_size, + created_at=package["content"]["createdAt"], + last_modified_at=package["content"]["updatedAt"], + data_type=( + DataType.FOLDER + if package["content"]["packageType"] == "Collection" + else DataType.FILE + ), + ) + + def _compute_file_path( all_packages: dict[str, dict[str, Any]], pck: dict[str, Any] ) -> Path: @@ -215,27 +250,66 @@ async def _get_package( ) async def get_package_files( - self, api_key: str, api_secret: str, package_id: str, limit: int, offset: int - ) -> list[dict[str, Any]]: - return cast( - list[dict[str, Any]], - await self._request( - api_key, - api_secret, - "GET", - f"/packages/{package_id}/files", - params={"limit": limit, "offset": offset}, - ), + self, + *, + api_key: str, + api_secret: str, + package_id: str, + limit: int, + offset: int, + fill_path: bool, + ) -> list[DatCorePackageMetaData]: + raw_data = await self._request( + api_key, + api_secret, + "GET", + f"/packages/{package_id}/files", + params={"limit": limit, "offset": offset}, ) + path = display_path = Path() + if fill_path: + package_info = await self._get_package(api_key, api_secret, package_id) + dataset_id = package_info["content"]["datasetId"] + dataset = await self._get_dataset(api_key, api_secret, dataset_id) + + path = ( + Path(dataset_id) + / 
Path( + "/".join( + ancestor["content"]["id"] + for ancestor in package_info.get("ancestors", []) + ) + ) + / Path(package_info["content"]["name"]) + ) + display_path = ( + Path(dataset["content"]["name"]) + / Path( + "/".join( + ancestor["content"]["name"] + for ancestor in package_info.get("ancestors", []) + ) + ) + / Path(package_info["content"]["name"]) + ) + + return [ + DatCorePackageMetaData(**_["content"], path=path, display_path=display_path) + for _ in raw_data + ] async def _get_pck_id_files( self, api_key: str, api_secret: str, pck_id: str, pck: dict[str, Any] - ) -> tuple[str, list[dict[str, Any]]]: - + ) -> tuple[str, list[DatCorePackageMetaData]]: return ( pck_id, await self.get_package_files( - api_key, api_secret, pck["content"]["nodeId"], limit=1, offset=0 + api_key=api_key, + api_secret=api_secret, + package_id=pck["content"]["nodeId"], + limit=1, + offset=0, + fill_path=False, ), ) @@ -293,7 +367,7 @@ async def list_packages_in_dataset( for pck in islice(dataset_pck["children"], offset, offset + limit) if pck["content"]["packageType"] != "Collection" ] - package_files = dict( + package_files: dict[str, list[DatCorePackageMetaData]] = dict( await logged_gather( *package_files_tasks, log=logger, @@ -302,7 +376,7 @@ async def list_packages_in_dataset( ) return ( [ - FileMetaData.from_pennsieve_package( + _to_file_meta_data( pck, ( package_files[pck["content"]["id"]] @@ -353,7 +427,7 @@ async def list_packages_in_collection( return ( [ - FileMetaData.from_pennsieve_package( + _to_file_meta_data( pck, ( package_files[pck["content"]["id"]] @@ -433,7 +507,7 @@ async def list_all_dataset_files( file_path = base_path / _compute_file_path(all_packages, package) file_meta_data.append( - FileMetaData.from_pennsieve_package( + _to_file_meta_data( package, package_files[package_id], file_path.parent ) ) @@ -445,11 +519,16 @@ async def get_presigned_download_link( ) -> URL: """returns the presigned download link of the first file in the package""" files = 
await self.get_package_files( - api_key, api_secret, package_id, limit=1, offset=0 + api_key=api_key, + api_secret=api_secret, + package_id=package_id, + limit=1, + offset=0, + fill_path=False, ) # NOTE: this was done like this in the original dsm. we might encounter a problem when there are more than one files assert len(files) == 1 # nosec - file_id = files[0]["content"]["id"] + file_id = files[0].id file_link = cast( dict[str, Any], await self._request( diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index e58076ad1275..be4e44d726f7 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -15,6 +15,7 @@ import simcore_service_datcore_adapter from asgi_lifespan import LifespanManager from fastapi.applications import FastAPI +from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock import MockFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_datcore_adapter.modules.pennsieve import ( @@ -24,6 +25,7 @@ from starlette.testclient import TestClient pytest_plugins = [ + "pytest_simcore.cli_runner", "pytest_simcore.environment_configs", "pytest_simcore.repository_paths", "pytest_simcore.pytest_global_environs", @@ -317,15 +319,40 @@ async def pennsieve_subsystem_mock( # get collection packages mock.get( - f"https://api.pennsieve.io/packages/{pennsieve_collection_id}" + rf"https://api.pennsieve.io/packages/{pennsieve_collection_id}" ).respond( status.HTTP_200_OK, json={ "content": {"name": "this package name is also awesome"}, "children": pennsieve_mock_dataset_packages["packages"], "ancestors": [ - {"content": {"name": "Bigger guy"}}, - {"content": {"name": "Big guy"}}, + { + "content": { + "name": "Bigger guy", + } + }, + { + "content": { + "name": "Big guy", + } + }, + ], + }, + ) + # get package ancestry + mock.get( + 
url__regex=rf"https://api.pennsieve.io/packages/{pennsieve_file_id}\?includeAncestors=(?P<includeAncestors>.+)$" + ).respond( + status.HTTP_200_OK, + json={ + "content": { + "datasetId": pennsieve_dataset_id, + "name": pennsieve_file_id, + }, + "ancestors": [ + {"content": {"id": faker.pystr(), "name": faker.name()}}, + {"content": {"id": faker.pystr(), "name": faker.name()}}, + {"content": {"id": faker.pystr(), "name": faker.name()}}, ], }, ) @@ -334,7 +361,22 @@ async def pennsieve_subsystem_mock( url__regex=r"https://api.pennsieve.io/packages/.+/files\?limit=1&offset=0$" ).respond( status.HTTP_200_OK, - json=[{"content": {"size": 12345, "id": "fake_file_id"}}], + json=[ + jsonable_encoder( + { + "content": { + "size": 12345, + "id": faker.pyint(), + "packageId": "N:package:475beff2-03c8-4dca-a221-d1d02e17f064", + "name": faker.file_name(), + "filename": faker.file_name(), + "s3bucket": faker.pystr(), + "createdAt": faker.date_time(), + "updatedAt": faker.date_time(), + } + } + ) + ], ) # download file diff --git a/services/datcore-adapter/tests/unit/test_cli.py b/services/datcore-adapter/tests/unit/test_cli.py new file mode 100644 index 000000000000..ef7b2b8a4f61 --- /dev/null +++ b/services/datcore-adapter/tests/unit/test_cli.py @@ -0,0 +1,35 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import os + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_datcore_adapter._meta import API_VERSION +from simcore_service_datcore_adapter.cli import main +from simcore_service_datcore_adapter.core.settings import ApplicationSettings +from typer.testing import CliRunner + + +def test_cli_help_and_version(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + + result = cli_runner.invoke(main, "--version") + assert result.exit_code == os.EX_OK, result.output + assert result.stdout.strip() == API_VERSION + + +def 
test_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK + + print(result.output) + settings = ApplicationSettings(result.output) + assert settings.model_dump() == ApplicationSettings.create_from_envs().model_dump() + + +def test_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout diff --git a/services/datcore-adapter/tests/unit/test_exceptions_handlers.py b/services/datcore-adapter/tests/unit/test_exceptions_handlers.py new file mode 100644 index 000000000000..53a28bb736cb --- /dev/null +++ b/services/datcore-adapter/tests/unit/test_exceptions_handlers.py @@ -0,0 +1,162 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from collections.abc import AsyncIterator + +import httpx +import pytest +from botocore.exceptions import ClientError +from fastapi import FastAPI, HTTPException, status +from fastapi.exceptions import RequestValidationError +from httpx import AsyncClient +from pydantic import ValidationError +from pytest_simcore.helpers.httpx_assert_checks import assert_status +from simcore_service_datcore_adapter.errors.handlers import set_exception_handlers + + +@pytest.fixture +def initialized_app() -> FastAPI: + app = FastAPI() + set_exception_handlers(app) + return app + + +@pytest.fixture +async def client(initialized_app: FastAPI) -> AsyncIterator[AsyncClient]: + async with AsyncClient( + transport=httpx.ASGITransport(app=initialized_app), + base_url="http://test", + headers={"Content-Type": "application/json"}, + ) as client: + yield client + + +@pytest.mark.parametrize( + "exception, status_code", + [ + ( + ClientError( + { + "Status": "pytest status", + "StatusReason": "pytest", + "Error": { + "Code": 
"NotAuthorizedException", + "Message": "pytest message", + }, + }, + operation_name="pytest operation", + ), + status.HTTP_401_UNAUTHORIZED, + ), + ( + ClientError( + { + "Status": "pytest status", + "StatusReason": "pytest", + "Error": { + "Code": "Whatever", + "Message": "pytest message", + }, + }, + operation_name="pytest operation", + ), + status.HTTP_500_INTERNAL_SERVER_ERROR, + ), + ( + NotImplementedError("pytest not implemented error"), + status.HTTP_501_NOT_IMPLEMENTED, + ), + ], + ids=str, +) +async def test_exception_handlers( + initialized_app: FastAPI, + client: AsyncClient, + exception: Exception, + status_code: int, +): + @initialized_app.get("/test") + async def test_endpoint(): + raise exception + + response = await client.get("/test") + assert_status( + response, + status_code, + None, + expected_msg=f"{exception}".replace("(", "\\(").replace(")", "\\)"), + ) + + +async def test_generic_http_exception_handler( + initialized_app: FastAPI, client: AsyncClient +): + @initialized_app.get("/test") + async def test_endpoint(): + raise HTTPException(status_code=status.HTTP_410_GONE) + + response = await client.get("/test") + assert_status(response, status.HTTP_410_GONE, None, expected_msg="Gone") + + +async def test_request_validation_error_handler( + initialized_app: FastAPI, client: AsyncClient +): + _error_msg = "pytest request validation error" + + @initialized_app.get("/test") + async def test_endpoint(): + raise RequestValidationError(errors=[_error_msg]) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_422_UNPROCESSABLE_ENTITY, + None, + expected_msg=_error_msg, + ) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +async def test_validation_error_handler(initialized_app: FastAPI, client: AsyncClient): + _error_msg = "pytest request validation error" + + @initialized_app.get("/test") + async def test_endpoint(): + raise ValidationError.from_exception_data( + _error_msg, + line_errors=[], + 
) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_500_INTERNAL_SERVER_ERROR, + None, + expected_msg=f"0 validation errors for {_error_msg}", + ) + + +@pytest.mark.xfail( + reason="Generic exception handler is not working as expected as shown in https://github.com/ITISFoundation/osparc-simcore/blob/5732a12e07e63d5ce55010ede9b9ab543bb9b278/packages/service-library/tests/fastapi/test_exceptions_utils.py" +) +async def test_generic_exception_handler(initialized_app: FastAPI, client: AsyncClient): + _error_msg = "Generic pytest exception" + + @initialized_app.get("/test") + async def test_endpoint(): + raise Exception( # pylint: disable=broad-exception-raised # noqa: TRY002 + _error_msg + ) + + response = await client.get("/test") + assert_status( + response, + status.HTTP_500_INTERNAL_SERVER_ERROR, + None, + expected_msg=_error_msg, + ) diff --git a/services/datcore-adapter/tests/unit/test_route_datasets.py b/services/datcore-adapter/tests/unit/test_route_datasets.py index 2a0d7dc85d6f..1bfd55269fcf 100644 --- a/services/datcore-adapter/tests/unit/test_route_datasets.py +++ b/services/datcore-adapter/tests/unit/test_route_datasets.py @@ -6,11 +6,11 @@ import httpx import respx from fastapi_pagination import Page -from pydantic import TypeAdapter -from simcore_service_datcore_adapter.models.schemas.datasets import ( +from models_library.api_schemas_datcore_adapter.datasets import ( DatasetMetaData, FileMetaData, ) +from pydantic import TypeAdapter from starlette import status diff --git a/services/datcore-adapter/tests/unit/test_route_files.py b/services/datcore-adapter/tests/unit/test_route_files.py index cbaa09704faf..1a083d71daa2 100644 --- a/services/datcore-adapter/tests/unit/test_route_files.py +++ b/services/datcore-adapter/tests/unit/test_route_files.py @@ -6,7 +6,7 @@ import httpx from pydantic import TypeAdapter -from simcore_service_datcore_adapter.models.domains.files import FileDownloadOut +from 
simcore_service_datcore_adapter.models.files import FileDownloadOut from starlette import status diff --git a/services/datcore-adapter/tests/unit/test_route_health.py b/services/datcore-adapter/tests/unit/test_route_health.py index 3f0b1712f7ea..65f04aece0c6 100644 --- a/services/datcore-adapter/tests/unit/test_route_health.py +++ b/services/datcore-adapter/tests/unit/test_route_health.py @@ -17,8 +17,7 @@ async def test_live_entrypoint(async_client: httpx.AsyncClient): assert response.text assert datetime.fromisoformat(response.text.split("@")[1]) assert ( - response.text.split("@")[0] - == "simcore_service_datcore_adapter.api.routes.health" + response.text.split("@")[0] == "simcore_service_datcore_adapter.api.rest.health" ) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 0c32076f8eb2..a7cd2c549701 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1142,8 +1142,9 @@ services: networks: - storage_subnet environment: - DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} DATCORE_ADAPTER_LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + DATCORE_ADAPTER_TRACING: ${DATCORE_ADAPTER_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}