Skip to content
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -629,3 +629,65 @@ def random_service_consume_filetype(

data.update(overrides)
return data


def random_group_classifier(
    *,
    gid: int,
    fake: Faker = DEFAULT_FAKER,
    **overrides,
) -> dict[str, Any]:
    """Returns a fake row for the ``group_classifiers`` table.

    Arguments:
        gid: primary group id the classifiers bundle belongs to
        fake: kept for signature symmetry with the other ``random_*`` factories;
            currently unused because the bundle payload is a fixed sample
        **overrides: column values that replace the generated defaults

    The resulting keys are validated against the table's columns AFTER applying
    ``overrides`` so that invalid override keys are caught as well (previously
    the check ran before the update and overrides bypassed it).
    """
    # local import keeps the factory importable without the postgres models package
    from simcore_postgres_database.models.classifiers import group_classifiers

    data = {
        "gid": gid,
        "bundle": {
            "vcs_ref": "asdfasdf",
            "vcs_url": "https://foo.classifiers.git",
            "build_date": "2021-01-20T15:19:30Z",
            "classifiers": {
                "project::dak": {
                    "url": None,
                    "logo": None,
                    "aliases": [],
                    "related": [],
                    "markdown": "",
                    "released": None,
                    "classifier": "project::dak",
                    "created_by": "Nicolas Chavannes",
                    "github_url": None,
                    "display_name": "DAK",
                    "wikipedia_url": None,
                    "short_description": None,
                },
                "organization::zmt": {
                    "url": "https://zmt.swiss/",
                    "logo": None,
                    "aliases": ["Zurich MedTech AG"],
                    "related": [],
                    "markdown": "Zurich MedTech AG (ZMT) offers tools and best practices for targeted life sciences applications to simulate, analyze, and predict complex and dynamic biological processes and interactions. ZMT is a member of Zurich43",
                    "released": None,
                    "classifier": "organization::zmt",
                    "created_by": "crespo",
                    "github_url": None,
                    "display_name": "ZMT",
                    "wikipedia_url": None,
                    "short_description": "ZMT is a member of Zurich43",
                },
            },
            "collections": {
                "jupyterlab-math": {
                    "items": ["crespo/osparc-demo"],
                    "markdown": "Curated collection of repositories with examples of notebooks to run in jupyter-python-octave-math service",
                    "created_by": "crespo",
                    "display_name": "jupyterlab-math",
                }
            },
        },
        "uses_scicrunch": False,
    }

    data.update(overrides)

    # validate after the overrides are applied so caller-provided keys are checked too
    assert set(data.keys()).issubset({c.name for c in group_classifiers.columns})

    return data
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from ...projects._projects_service import get_project_for_user
from ...projects.exceptions import BaseProjectError
from ...projects.models import ProjectDict
from ...scicrunch.db import ResearchResourceRepository
from ...scicrunch.scicrunch_service import ScicrunchResourcesService
from ..exceptions import SDSException
from .template_json import write_template_json
from .xlsx.code_description import (
Expand Down Expand Up @@ -70,10 +70,10 @@ async def _add_rrid_entries(
) -> None:
rrid_entires: deque[RRIDEntry] = deque()

repo = ResearchResourceRepository(app)
service = ScicrunchResourcesService(app)
classifiers = project_data["classifiers"]
for classifier in classifiers:
scicrunch_resource = await repo.get(rrid=classifier)
scicrunch_resource = await service.get_resource_atdb(rrid=classifier)
if scicrunch_resource is None:
continue

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import logging
from typing import Any

import sqlalchemy as sa
from simcore_postgres_database.models.classifiers import group_classifiers
from simcore_postgres_database.utils_repos import pass_or_acquire_connection
from sqlalchemy.engine import Row
from sqlalchemy.ext.asyncio import AsyncConnection

from ..db.base_repository import BaseRepository

_logger = logging.getLogger(__name__)


class GroupClassifierRepository(BaseRepository):
    """Read-only access to the ``group_classifiers`` table (one bundle per group)."""

    async def _get_bundle(
        self, gid: int, connection: AsyncConnection | None = None
    ) -> Row | None:
        """Returns the single row holding the classifiers bundle for *gid*, or None."""
        async with pass_or_acquire_connection(self.engine, connection) as conn:
            result = await conn.execute(
                sa.select(group_classifiers.c.bundle).where(
                    group_classifiers.c.gid == gid
                )
            )
            return result.one_or_none()

    async def get_classifiers_from_bundle(self, gid: int) -> dict[str, Any] | None:
        """Returns the stored classifiers bundle for *gid*, or None if no row exists."""
        bundle_row = await self._get_bundle(gid)
        if bundle_row:
            # FIX: the JSON `bundle` column deserializes to a plain dict, which has
            # no `_mapping` attribute — `bundle_row.bundle._mapping` raised
            # AttributeError whenever a bundle existed. Copy to detach from the row.
            bundle: dict[str, Any] = bundle_row.bundle
            return dict(bundle)
        return None

    async def group_uses_scicrunch(
        self, gid: int, connection: AsyncConnection | None = None
    ) -> bool:
        """True if the group's classifiers are managed via scicrunch RRIDs."""
        async with pass_or_acquire_connection(self.engine, connection) as conn:
            result = await conn.execute(
                sa.select(group_classifiers.c.uses_scicrunch).where(
                    group_classifiers.c.gid == gid
                )
            )
            # scalar_one_or_none(): both "no row" (None) and a NULL column map to False
            return bool(result.scalar_one_or_none())
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,11 @@

from .._meta import API_VTAG
from ..login.decorators import login_required
from ..scicrunch.db import ResearchResourceRepository
from ..scicrunch.errors import ScicrunchError
from ..scicrunch.models import ResearchResource, ResourceHit
from ..scicrunch.service_client import SciCrunch
from ..scicrunch.models import ResourceHit
from ..scicrunch.scicrunch_service import ScicrunchResourcesService
from ..security.decorators import permission_required
from ..utils_aiohttp import envelope_json_response
from ._classifiers_service import GroupClassifierRepository, build_rrids_tree_view
from ._classifiers_service import GroupClassifiersService
from ._common.exceptions_handlers import handle_plugin_requests_exceptions
from ._common.schemas import GroupsClassifiersQuery, GroupsPathParams

Expand All @@ -29,23 +27,15 @@
@permission_required("groups.*")
@handle_plugin_requests_exceptions
async def get_group_classifiers(request: web.Request):
try:
path_params = parse_request_path_parameters_as(GroupsPathParams, request)
query_params: GroupsClassifiersQuery = parse_request_query_parameters_as(
GroupsClassifiersQuery, request
)

repo = GroupClassifierRepository(request.app)
if not await repo.group_uses_scicrunch(path_params.gid):
bundle = await repo.get_classifiers_from_bundle(path_params.gid)
return envelope_json_response(bundle)

# otherwise, build dynamic tree with RRIDs
view = await build_rrids_tree_view(
request.app, tree_view_mode=query_params.tree_view
)
except ScicrunchError:
view = {}
path_params = parse_request_path_parameters_as(GroupsPathParams, request)
query_params: GroupsClassifiersQuery = parse_request_query_parameters_as(
GroupsClassifiersQuery, request
)

service = GroupClassifiersService(request.app)
view = await service.get_group_classifiers(
path_params.gid, tree_view_mode=query_params.tree_view
)

return envelope_json_response(view)

Expand All @@ -59,15 +49,9 @@ async def get_group_classifiers(request: web.Request):
@handle_plugin_requests_exceptions
async def get_scicrunch_resource(request: web.Request):
rrid = request.match_info["rrid"]
rrid = SciCrunch.validate_identifier(rrid)

# check if in database first
repo = ResearchResourceRepository(request.app)
resource: ResearchResource | None = await repo.get_resource(rrid)
if not resource:
# otherwise, request to scicrunch service
scicrunch = SciCrunch.get_instance(request.app)
resource = await scicrunch.get_resource_fields(rrid)
service = ScicrunchResourcesService(request.app)
resource = await service.get_or_fetch_reseach_resource(rrid)

return envelope_json_response(resource.model_dump())

Expand All @@ -82,16 +66,8 @@ async def get_scicrunch_resource(request: web.Request):
async def add_scicrunch_resource(request: web.Request):
rrid = request.match_info["rrid"]

# check if exists
repo = ResearchResourceRepository(request.app)
resource: ResearchResource | None = await repo.get_resource(rrid)
if not resource:
# then request scicrunch service
scicrunch = SciCrunch.get_instance(request.app)
resource = await scicrunch.get_resource_fields(rrid)

# insert new or if exists, then update
await repo.upsert(resource)
service = ScicrunchResourcesService(request.app)
resource = await service.create_research_resource(rrid)

return envelope_json_response(resource.model_dump())

Expand All @@ -106,7 +82,7 @@ async def add_scicrunch_resource(request: web.Request):
async def search_scicrunch_resources(request: web.Request):
guess_name = str(request.query["guess_name"]).strip()

scicrunch = SciCrunch.get_instance(request.app)
hits: list[ResourceHit] = await scicrunch.search_resource(guess_name)
service = ScicrunchResourcesService(request.app)
hits: list[ResourceHit] = await service.search_research_resources(guess_name)

return envelope_json_response([hit.model_dump() for hit in hits])
Loading
Loading