Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions services/api-server/openapi.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{

Check failure on line 1 in services/api-server/openapi.json

View workflow job for this annotation

GitHub Actions / check OAS' are up to date

Error when checking services/api-server/openapi.json
"openapi": "3.1.0",
"info": {
"title": "osparc.io public API",
Expand Down Expand Up @@ -4593,7 +4593,6 @@
"studies"
],
"summary": "Create Study Job",
"description": "hidden -- if True (default) hides project from UI",
"operationId": "create_study_job",
"security": [
{
Expand All @@ -4617,9 +4616,11 @@
"required": false,
"schema": {
"type": "boolean",
"description": "If True (default) hides project from UI",
"default": true,
"title": "Hidden"
}
},
"description": "If True (default) hides project from UI"
},
{
"name": "x-simcore-parent-project-uuid",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,13 +64,16 @@ async def list_jobs(
# 2. Convert projects to jobs
jobs: list[Job] = []
for project_job in projects_page.data:

# NOTE: this is only valid for jobs created for Solvers or Programs which expect a single node per project!

assert ( # nosec
len(project_job.workbench) == 1
), "Expected only one solver node in workbench"

solver_node = next(iter(project_job.workbench.values()))
one_project_node = next(iter(project_job.workbench.values()))
job_inputs: JobInputs = create_job_inputs_from_node_inputs(
inputs=solver_node.inputs or {}
inputs=one_project_node.inputs or {}
)
assert project_job.job_parent_resource_name # nosec

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ async def list_jobs(
"solvers", # solver_id, "releases", solver_version, "jobs",
]
if filter_by_solver_key:
collection_or_resource_ids.append(filter_by_solver_key)
collection_or_resource_ids.append(f"{filter_by_solver_key}")
if filter_by_solver_version:
collection_or_resource_ids.append("releases")
collection_or_resource_ids.append(filter_by_solver_version)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
from dataclasses import dataclass
from uuid import UUID

from models_library.api_schemas_webserver.projects import ProjectPatch
from models_library.api_schemas_webserver.projects_nodes import NodeOutputs
from models_library.function_services_catalog.services import file_picker
from models_library.products import ProductName
from models_library.projects import ProjectID
from models_library.projects_nodes import InputID, InputTypes
from models_library.projects_nodes_io import NodeID
from models_library.rest_pagination import (
MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE,
PageMetaInfoLimitOffset,
Expand All @@ -11,9 +18,15 @@

from ._service_jobs import JobService
from ._service_utils import check_user_product_consistency
from .api.dependencies.webserver_http import AuthSession
from .api.dependencies.webserver_rpc import WbApiRpcClient
from .models.api_resources import compose_resource_name
from .models.schemas.jobs import Job
from .models.schemas.jobs import Job, JobInputs
from .models.schemas.studies import StudyID
from .services_http.study_job_models_converters import (
create_job_from_study,
get_project_and_file_inputs_from_job_inputs,
)

DEFAULT_PAGINATION_LIMIT = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE - 1

Expand All @@ -23,6 +36,8 @@ class StudyService:
job_service: JobService
user_id: UserID
product_name: ProductName
webserver_api: AuthSession
wb_api_rpc: WbApiRpcClient

def __post_init__(self):
check_user_product_consistency(
Expand All @@ -39,7 +54,11 @@ async def list_jobs(
pagination_offset: PageOffsetInt | None = None,
pagination_limit: PageLimitInt | None = None,
) -> tuple[list[Job], PageMetaInfoLimitOffset]:
"""Lists all solver jobs for a user with pagination"""
"""Lists all study jobs for a user with pagination


If filter_by_study_id==None then it lists all study-jobs of the user
"""

# 1. Compose job parent resource name prefix
collection_or_resource_ids: list[str] = [
Expand All @@ -52,7 +71,76 @@ async def list_jobs(

# 2. list jobs under job_parent_resource_name
return await self.job_service.list_jobs(
# FIXME: these jobs are only valid for solvers and programs but not studies!
job_parent_resource_name=job_parent_resource_name,
pagination_offset=pagination_offset,
pagination_limit=pagination_limit,
)

async def create_job(
    self,
    *,
    study_id: StudyID,
    job_inputs: JobInputs,
    hidden: bool = True,
    parent_project_uuid: ProjectID | None = None,
    parent_node_id: NodeID | None = None,
) -> Job:
    """Creates a job from a study.

    Clones the study's project, registers the clone as a job, and wires the
    given job inputs into the cloned project's parameters and file-picker
    nodes.

    Arguments:
        study_id: study (project) to clone into a new job
        job_inputs: input values to apply to the cloned project
        hidden: if True (default) the cloned project is hidden from the UI
        parent_project_uuid: optional parent project for provenance
        parent_node_id: optional parent node for provenance

    Returns:
        the newly created Job
    """
    # 1. Clone the study into a new (by default hidden) project
    project = await self.webserver_api.clone_project(
        project_id=study_id,
        hidden=hidden,
        parent_project_uuid=parent_project_uuid,
        parent_node_id=parent_node_id,
    )
    job = create_job_from_study(
        study_key=study_id, project=project, job_inputs=job_inputs
    )

    # 2. Rename the cloned project to the job's name
    await self.webserver_api.patch_project(
        project_id=job.id,
        patch_params=ProjectPatch(name=job.name),  # type: ignore[arg-type]
    )

    # 3. Register the cloned project as a job under its runner resource name
    await self.wb_api_rpc.mark_project_as_job(
        product_name=self.product_name,
        user_id=self.user_id,
        project_uuid=job.id,
        job_parent_resource_name=job.runner_name,
    )

    # 4. Fetch the clone's current project inputs
    project_inputs = await self.webserver_api.get_project_inputs(
        project_id=project.uuid
    )

    # Collect file-picker nodes that still have empty outputs: these
    # presumably act as the study's file parameters (label -> node id)
    file_param_nodes = {}
    for node_id, node in project.workbench.items():
        if (
            node.key == file_picker.META.key
            and node.outputs is not None
            and len(node.outputs) == 0
        ):
            file_param_nodes[node.label] = node_id

    # NOTE(review): always passed empty into the helper below — confirm
    # whether the helper reads or mutates it, otherwise it could be inlined
    file_inputs: dict[InputID, InputTypes] = {}

    # 5. Split the job inputs into plain project inputs vs. file inputs
    (
        new_project_inputs,
        new_project_file_inputs,
    ) = get_project_and_file_inputs_from_job_inputs(
        project_inputs, file_inputs, job_inputs
    )

    # 6. Attach each file input as the "outFile" output of its picker node
    for node_label, file_link in new_project_file_inputs.items():
        await self.webserver_api.update_node_outputs(
            project_id=project.uuid,
            node_id=UUID(file_param_nodes[node_label]),
            new_node_outputs=NodeOutputs(outputs={"outFile": file_link}),
        )

    # 7. Push the remaining (non-file) inputs, if any
    if len(new_project_inputs) > 0:
        await self.webserver_api.update_project_inputs(
            project_id=project.uuid, new_inputs=new_project_inputs
        )

    return job
Original file line number Diff line number Diff line change
Expand Up @@ -98,11 +98,19 @@ def get_solver_service(

def get_study_service(
    job_service: Annotated[JobService, Depends(get_job_service)],
    webserver_api: Annotated[AuthSession, Depends(get_webserver_session)],
    wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)],
    user_id: Annotated[UserID, Depends(get_current_user_id)],
    product_name: Annotated[ProductName, Depends(get_product_name)],
) -> StudyService:
    """
    "Assembles" the StudyService layer to the underlying service and client interfaces
    in the context of the rest controller (i.e. api/dependencies)
    """
    # Pure wiring: every collaborator is resolved by FastAPI dependency
    # injection above; StudyService.__post_init__ then re-checks
    # user/product consistency against job_service.
    return StudyService(
        job_service=job_service,
        webserver_api=webserver_api,
        wb_api_rpc=wb_api_rpc,
        user_id=user_id,
        product_name=product_name,
    )
Expand Down
Original file line number Diff line number Diff line change
@@ -1,18 +1,13 @@
import logging
from collections.abc import Callable
from typing import Annotated
from uuid import UUID

from fastapi import APIRouter, Depends, Header, Query, Request, status
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from fastapi_pagination.api import create_page
from models_library.api_schemas_webserver.projects import ProjectPatch
from models_library.api_schemas_webserver.projects_nodes import NodeOutputs
from models_library.clusters import ClusterID
from models_library.function_services_catalog.services import file_picker
from models_library.projects import ProjectID
from models_library.projects_nodes import InputID, InputTypes
from models_library.projects_nodes_io import NodeID
from pydantic import HttpUrl, PositiveInt
from servicelib.logging_utils import log_context
Expand Down Expand Up @@ -42,19 +37,13 @@
from ...services_http.solver_job_models_converters import create_jobstatus_from_task
from ...services_http.storage import StorageApi
from ...services_http.study_job_models_converters import (
create_job_from_study,
create_job_outputs_from_project_outputs,
get_project_and_file_inputs_from_job_inputs,
)
from ...services_http.webserver import AuthSession
from ...services_rpc.wb_api_server import WbApiRpcClient
from ..dependencies.application import get_reverse_url_mapper
from ..dependencies.authentication import get_current_user_id, get_product_name
from ..dependencies.authentication import get_current_user_id
from ..dependencies.services import get_api_client, get_study_service
from ..dependencies.webserver_http import AuthSession, get_webserver_session
from ..dependencies.webserver_rpc import (
get_wb_api_rpc_client,
)
from ._constants import (
FMSG_CHANGELOG_CHANGED_IN_VERSION,
FMSG_CHANGELOG_NEW_IN_VERSION,
Expand Down Expand Up @@ -98,9 +87,6 @@ async def list_study_jobs(
study_service: Annotated[StudyService, Depends(get_study_service)],
url_for: Annotated[Callable, Depends(get_reverse_url_mapper)],
):
msg = f"list study jobs study_id={study_id!r} with pagination={page_params!r}. SEE https://github.com/ITISFoundation/osparc-simcore/issues/4177"
_logger.debug(msg)

jobs, meta = await study_service.list_jobs(
filter_by_study_id=study_id,
pagination_offset=page_params.offset,
Expand Down Expand Up @@ -128,27 +114,26 @@ async def list_study_jobs(
async def create_study_job(
study_id: StudyID,
job_inputs: JobInputs,
webserver_api: Annotated[AuthSession, Depends(get_webserver_session)],
wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)],
study_service: Annotated[StudyService, Depends(get_study_service)],
url_for: Annotated[Callable, Depends(get_reverse_url_mapper)],
user_id: Annotated[PositiveInt, Depends(get_current_user_id)],
product_name: Annotated[str, Depends(get_product_name)],
hidden: Annotated[bool, Query()] = True, # noqa: FBT002
x_simcore_parent_project_uuid: ProjectID | None = Header(default=None),
x_simcore_parent_node_id: NodeID | None = Header(default=None),
# Filters
hidden: Annotated[ # noqa: FBT002
bool, Query(description="Hide this study in web UI (default: True)")
] = True,
# Headers
x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None,
x_simcore_parent_node_id: Annotated[NodeID | None, Header()] = None,
) -> Job:
"""
hidden -- if True (default) hides project from UI
"""
project = await webserver_api.clone_project(
project_id=study_id,

job = await study_service.create_job(
study_id=study_id,
job_inputs=job_inputs,
hidden=hidden,
parent_project_uuid=x_simcore_parent_project_uuid,
parent_node_id=x_simcore_parent_node_id,
)
job = create_job_from_study(
study_key=study_id, project=project, job_inputs=job_inputs
)

# Set URLs in the controller
job.url = url_for(
"get_study_job",
study_id=study_id,
Expand All @@ -161,50 +146,6 @@ async def create_study_job(
job_id=job.id,
)

await webserver_api.patch_project(
project_id=job.id,
patch_params=ProjectPatch(name=job.name),
)

await wb_api_rpc.mark_project_as_job(
product_name=product_name,
user_id=user_id,
project_uuid=job.id,
job_parent_resource_name=job.runner_name,
)

project_inputs = await webserver_api.get_project_inputs(project_id=project.uuid)

file_param_nodes = {}
for node_id, node in project.workbench.items():
if (
node.key == file_picker.META.key
and node.outputs is not None
and len(node.outputs) == 0
):
file_param_nodes[node.label] = node_id

file_inputs: dict[InputID, InputTypes] = {}

(
new_project_inputs,
new_project_file_inputs,
) = get_project_and_file_inputs_from_job_inputs(
project_inputs, file_inputs, job_inputs
)

for node_label, file_link in new_project_file_inputs.items():
await webserver_api.update_node_outputs(
project_id=project.uuid,
node_id=UUID(file_param_nodes[node_label]),
new_node_outputs=NodeOutputs(outputs={"outFile": file_link}),
)

if len(new_project_inputs) > 0:
await webserver_api.update_project_inputs(
project_id=project.uuid, new_inputs=new_project_inputs
)

assert job.name == _compose_job_resource_name(study_id, job.id)

return job
Expand Down
4 changes: 4 additions & 0 deletions services/api-server/tests/unit/service/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,12 +132,16 @@ def solver_service(
@pytest.fixture
def study_service(
    job_service: JobService,
    auth_session: AuthSession,
    wb_api_rpc_client: WbApiRpcClient,
    product_name: ProductName,
    user_id: UserID,
) -> StudyService:
    # Assembles a StudyService for unit tests, mirroring the production
    # wiring done by the get_study_service dependency (same constructor
    # kwargs, with test doubles supplied by the sibling fixtures).
    return StudyService(
        job_service=job_service,
        webserver_api=auth_session,
        wb_api_rpc=wb_api_rpc_client,
        user_id=user_id,
        product_name=product_name,
    )
Expand Down
Loading
Loading