26 changes: 23 additions & 3 deletions packages/models-library/src/models_library/projects.py
@@ -8,9 +8,6 @@
from uuid import UUID

from common_library.basic_types import DEFAULT_FACTORY
from models_library.basic_types import ConstrainedStr
from models_library.folders import FolderID
from models_library.workspaces import WorkspaceID
from pydantic import (
BaseModel,
ConfigDict,
@@ -21,8 +18,11 @@
)

from .basic_regex import DATE_RE, UUID_RE_BASE
from .basic_types import ConstrainedStr
from .emails import LowerCaseEmailStr
from .folders import FolderID
from .groups import GroupID
from .products import ProductName
from .projects_access import AccessRights, GroupIDStr
from .projects_nodes import Node
from .projects_nodes_io import NodeIDStr
@@ -33,6 +33,7 @@
none_to_empty_str_pre_validator,
)
from .utils.enums import StrAutoEnum
from .workspaces import WorkspaceID

ProjectID: TypeAlias = UUID
CommitID: TypeAlias = int
@@ -147,6 +148,25 @@ def _convert_sql_alchemy_enum(cls, v):
)


class ProjectListAtDB(BaseProjectModel):
id: int
type: ProjectType
template_type: ProjectTemplateType | None
prj_owner: int | None
ui: dict[str, Any] | None
classifiers: list[ClassifierID] | None
dev: dict[str, Any] | None
quality: dict[str, Any]
published: bool | None
hidden: bool
workspace_id: WorkspaceID | None
trashed: datetime | None
trashed_by: UserID | None
trashed_explicitly: bool
product_name: ProductName
folder_id: FolderID | None


class Project(BaseProjectModel):
# NOTE: This is the pydantic pendant of project-v0.0.1.json used in the API of the webserver/webclient
# NOT for usage with DB!!
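For orientation, the ProjectListAtDB model added above describes one row returned by the listing query. A minimal introspection sketch, assuming pydantic v2 and an environment where models_library is importable (not part of this PR):

from models_library.projects import ProjectListAtDB

# Print every column the listing row is expected to carry and whether it is required
# (inherited BaseProjectModel fields included).
for name, info in ProjectListAtDB.model_fields.items():
    print(f"{name}: required={info.is_required()}")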
(diff of the next changed file)
@@ -168,7 +168,7 @@ class Node(BaseModel):
),
] = None

thumbnail: Annotated[
thumbnail: Annotated[ # <-- (DEPRECATED) Can be removed
str | HttpUrl | None,
Field(
description="url of the latest screenshot of the node",
@@ -232,18 +232,18 @@ class Node(BaseModel):
] = DEFAULT_FACTORY

output_node: Annotated[bool | None, Field(deprecated=True, alias="outputNode")] = (
None
None # <-- (DEPRECATED) Can be removed
)

output_nodes: Annotated[
output_nodes: Annotated[ # <-- (DEPRECATED) Can be removed
list[NodeID] | None,
Field(
description="Used in group-nodes. Node IDs of those connected to the output",
alias="outputNodes",
),
] = None

parent: Annotated[
parent: Annotated[ # <-- (DEPRECATED) Can be removed
NodeID | None,
Field(
description="Parent's (group-nodes') node ID s. Used to group",
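The fields flagged above are only marked for later removal. A minimal sketch of what Field(deprecated=True) does at runtime, assuming pydantic >= 2.7 (the LegacyNode model below is hypothetical, not part of the PR):

import warnings
from typing import Annotated

from pydantic import BaseModel, Field


class LegacyNode(BaseModel):
    # hypothetical mini-model mirroring how Node flags fields for removal
    output_node: Annotated[bool | None, Field(deprecated=True, alias="outputNode")] = None


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    node = LegacyNode(outputNode=True)
    _ = node.output_node  # reading a deprecated field emits a DeprecationWarning
print([str(w.message) for w in caught])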
(diff of the next changed file)
@@ -10,7 +10,7 @@

from aiohttp import web
from models_library.folders import FolderID, FolderQuery, FolderScope
from models_library.projects import ProjectID, ProjectTemplateType
from models_library.projects import ProjectTemplateType
from models_library.rest_ordering import OrderBy
from models_library.users import UserID
from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
@@ -23,11 +23,13 @@
from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB

from ..folders import _folders_repository
from ..users.api import get_user_email_legacy
from ..workspaces.api import check_user_workspace_access
from . import _projects_service
from ._access_rights_repository import batch_get_project_access_rights
from ._projects_repository import batch_get_trashed_by_primary_gid
from ._projects_repository_legacy import ProjectDBAPI
from ._projects_repository_legacy_utils import convert_to_schema_names
from .models import ProjectDict, ProjectTypeAPI


@@ -53,15 +55,14 @@ async def _aggregate_data_to_projects_from_other_sources(
app: web.Application,
*,
db_projects: list[ProjectDict],
db_project_types: list[ProjectTypeDB],
user_id: UserID,
) -> list[ProjectDict]:
"""
Aggregates data to each project from other sources, first as a batch-update and then as a parallel-update.
"""
# updating `project.trashed_by_primary_gid`
trashed_by_primary_gid_values = await batch_get_trashed_by_primary_gid(
app, projects_uuids=[ProjectID(p["uuid"]) for p in db_projects]
app, projects_uuids=[p["uuid"] for p in db_projects]
)

_batch_update("trashed_by_primary_gid", trashed_by_primary_gid_values, db_projects)
@@ -70,7 +71,7 @@ async def _aggregate_data_to_projects_from_other_sources(
project_to_access_rights = await batch_get_project_access_rights(
app=app,
projects_uuids_with_workspace_id=[
(ProjectID(p["uuid"]), p["workspaceId"]) for p in db_projects
(p["uuid"], p["workspaceId"]) for p in db_projects
],
)

@@ -79,22 +80,40 @@
_projects_service.add_project_states_for_user(
user_id=user_id,
project=prj,
is_template=prj_type == ProjectTypeDB.TEMPLATE,
is_template=prj["type"] == ProjectTypeDB.TEMPLATE.value,
app=app,
)
for prj, prj_type in zip(db_projects, db_project_types, strict=False)
for prj in db_projects
]

updated_projects: list[ProjectDict] = await _paralell_update(
*update_state_per_project,
)

for project in updated_projects:
project["accessRights"] = project_to_access_rights[project["uuid"]]
project["accessRights"] = project_to_access_rights[f"{project['uuid']}"]

return updated_projects


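The helpers _batch_update and _paralell_update used above are not part of this diff. A rough sketch of what the batch step could look like, under the assumption that the fetched values come back in the same order as the projects they belong to (hypothetical re-implementation, for illustration only):

from typing import Any


def _batch_update(key: str, values: list[Any], projects: list[dict[str, Any]]) -> None:
    # write the i-th value into the i-th project dict under `key`
    for project, value in zip(projects, values, strict=True):
        project[key] = value


projects = [
    {"uuid": "11111111-1111-1111-1111-111111111111"},
    {"uuid": "22222222-2222-2222-2222-222222222222"},
]
_batch_update("trashed_by_primary_gid", [None, 7], projects)
print(projects)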
async def _legacy_convert_db_projects_to_api_projects(
app: web.Application,
db: ProjectDBAPI,
db_projects: list[dict[str, Any]],
) -> list[dict]:
"""
Converts db schema projects to API schema (legacy postprocessing).
"""
api_projects: list[dict] = []
for db_prj in db_projects:
db_prj_dict = db_prj
db_prj_dict.pop("product_name", None)
db_prj_dict["tags"] = await db.get_tags_by_project(project_id=f"{db_prj['id']}")
user_email = await get_user_email_legacy(app, db_prj["prj_owner"])
api_projects.append(convert_to_schema_names(db_prj_dict, user_email))
return api_projects


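convert_to_schema_names, called by the new helper above, maps snake_case DB columns to the camelCase names the API schema expects (and swaps prj_owner for the owner's e-mail). A small illustration of the renaming via snake_to_camel, which the legacy utils module already relies on; the column names below are chosen for illustration:

from models_library.utils.change_case import snake_to_camel

for db_name in ("creation_date", "last_change_date", "prj_owner", "workspace_id"):
    print(db_name, "->", snake_to_camel(db_name))
# creation_date -> creationDate, last_change_date -> lastChangeDate, ...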
async def list_projects( # pylint: disable=too-many-arguments
app: web.Application,
user_id: UserID,
@@ -140,7 +159,7 @@ async def list_projects( # pylint: disable=too-many-arguments
workspace_id=workspace_id,
)

db_projects, db_project_types, total_number_projects = await db.list_projects_dicts(
db_projects, total_number_projects = await db.list_projects_dicts(
product_name=product_name,
user_id=user_id,
workspace_query=(
@@ -172,11 +191,15 @@
order_by=order_by,
)

projects = await _aggregate_data_to_projects_from_other_sources(
app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id
api_projects = await _legacy_convert_db_projects_to_api_projects(
app, db, db_projects
)

return projects, total_number_projects
final_projects = await _aggregate_data_to_projects_from_other_sources(
app, db_projects=api_projects, user_id=user_id
)

return final_projects, total_number_projects


async def list_projects_full_depth(
@@ -196,7 +219,7 @@ async def list_projects_full_depth(
) -> tuple[list[ProjectDict], int]:
db = ProjectDBAPI.get_from_app_context(app)

db_projects, db_project_types, total_number_projects = await db.list_projects_dicts(
db_projects, total_number_projects = await db.list_projects_dicts(
product_name=product_name,
user_id=user_id,
workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL),
@@ -210,8 +233,12 @@
order_by=order_by,
)

projects = await _aggregate_data_to_projects_from_other_sources(
app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id
api_projects = await _legacy_convert_db_projects_to_api_projects(
app, db, db_projects
)

final_projects = await _aggregate_data_to_projects_from_other_sources(
app, db_projects=api_projects, user_id=user_id
)

return projects, total_number_projects
return final_projects, total_number_projects
(diff of the next changed file)
@@ -19,7 +19,11 @@
from models_library.folders import FolderQuery, FolderScope
from models_library.groups import GroupID
from models_library.products import ProductName
from models_library.projects import ProjectID, ProjectIDStr
from models_library.projects import (
ProjectID,
ProjectIDStr,
ProjectListAtDB,
)
from models_library.projects_comments import CommentID, ProjectsCommentsDB
from models_library.projects_nodes import Node
from models_library.projects_nodes_io import NodeID, NodeIDStr
@@ -584,7 +588,7 @@ async def list_projects_dicts( # pylint: disable=too-many-arguments,too-many-st
limit: int | None = None,
# order
order_by: OrderBy = DEFAULT_ORDER_BY,
) -> tuple[list[ProjectDict], list[ProjectType], int]:
) -> tuple[list[dict[str, Any]], int]:
async with self.engine.acquire() as conn:
user_groups_proxy: list[RowProxy] = await self._list_user_groups(
conn, user_id
@@ -667,14 +671,18 @@ async def list_projects_dicts( # pylint: disable=too-many-arguments,too-many-st
projects.c.id,
)

prjs, prj_types = await self._execute_without_permission_check(
conn,
select_projects_query=combined_query.offset(offset).limit(limit),
)
prjs_output = []
async for row in conn.execute(combined_query.offset(offset).limit(limit)):
# NOTE: Historically, projects were returned as a dictionary. I have created a model that
# validates the DB row, but this model includes some default values inside the Workbench Node model.
# Therefore, if we use this model, it will return those default values, which is not backward-compatible
# with the frontend. The frontend would need to check and adapt how it handles default values in
# Workbench nodes, which are currently not returned if not set in the DB.
ProjectListAtDB.model_validate(row)
prjs_output.append(dict(row.items()))

return (
prjs,
prj_types,
prjs_output,
cast(int, total_count),
)

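The NOTE above is the reason the raw row dict is returned while ProjectListAtDB is used for validation only: dumping the validated model would re-introduce field defaults that are absent from the DB row. A minimal, generic sketch of that effect (the Item model is hypothetical, only to illustrate):

from pydantic import BaseModel


class Item(BaseModel):
    name: str
    state: str = "idle"  # default value, not stored in the DB row


row = {"name": "probe"}  # what the DB actually returned
print(Item.model_validate(row).model_dump())  # {'name': 'probe', 'state': 'idle'}  <- default injected
print(dict(row))  # {'name': 'probe'}  <- backward-compatible shape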
@@ -1249,6 +1257,13 @@ async def remove_tag(
project["tags"].remove(tag_id)
return convert_to_schema_names(project, user_email)

async def get_tags_by_project(self, project_id: str) -> list[int]:
async with self.engine.acquire() as conn:
query = sa.select(projects_tags.c.tag_id).where(
projects_tags.c.project_id == project_id
)
return [row.tag_id async for row in conn.execute(query)]

#
# Project Comments
#
(diff of the next changed file)
@@ -1,4 +1,3 @@
import asyncio
import logging
from collections.abc import Mapping
from copy import deepcopy
@@ -9,23 +8,27 @@
import sqlalchemy as sa
from aiopg.sa.connection import SAConnection
from aiopg.sa.result import RowProxy
from models_library.projects import ProjectAtDB, ProjectID, ProjectTemplateType
from models_library.projects import ProjectID, ProjectType
from models_library.projects_nodes import Node
from models_library.projects_nodes_io import NodeIDStr
from models_library.utils.change_case import camel_to_snake, snake_to_camel
from pydantic import ValidationError
from simcore_postgres_database.models.project_to_groups import project_to_groups
from simcore_postgres_database.webserver_models import ProjectType, projects
from simcore_postgres_database.webserver_models import (
ProjectTemplateType as ProjectTemplateTypeDB,
)
from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB
from simcore_postgres_database.webserver_models import (
projects,
)
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.sql.selectable import CompoundSelect, Select

from ..db.models import GroupType, groups, projects_tags, user_to_groups, users
from ..users.exceptions import UserNotFoundError
from ..utils import format_datetime
from ._projects_repository import PROJECT_DB_COLS
from .exceptions import (
NodeNotFoundError,
ProjectInvalidRightsError,
ProjectInvalidUsageError,
ProjectNotFoundError,
)
@@ -91,9 +94,9 @@ def convert_to_schema_names(
if col_name == "prj_owner":
# this entry has to be converted to the owner e-mail address
converted_value = user_email
if col_name == "type" and isinstance(col_value, ProjectType):
if col_name == "type" and isinstance(col_value, ProjectTypeDB):
converted_value = col_value.value
if col_name == "template_type" and isinstance(col_value, ProjectTemplateType):
if col_name == "template_type" and isinstance(col_value, ProjectTemplateTypeDB):
converted_value = col_value.value

if col_name in SCHEMA_NON_NULL_KEYS and col_value is None:
@@ -184,50 +187,6 @@ async def _upsert_tags_in_project(
.on_conflict_do_nothing()
)

async def _execute_without_permission_check(
self,
conn: SAConnection,
*,
select_projects_query: Select | CompoundSelect,
) -> tuple[list[dict[str, Any]], list[ProjectType]]:
api_projects: list[dict] = [] # API model-compatible projects
db_projects: list[dict] = [] # DB model-compatible projects
project_types: list[ProjectType] = []
async for row in conn.execute(select_projects_query):
assert isinstance(row, RowProxy) # nosec
try:
await asyncio.get_event_loop().run_in_executor(
None, ProjectAtDB.model_validate, row
)

except ProjectInvalidRightsError:
continue

except ValidationError as exc:
logger.warning(
"project %s failed validation, please check. error: %s",
f"{row.id=}",
exc,
)
continue

prj: dict[str, Any] = dict(row.items())
prj.pop("product_name", None)

db_projects.append(prj)

# NOTE: DO NOT nest _get_tags_by_project in async loop above !!!
# FIXME: temporary avoids inner async loops issue https://github.com/aio-libs/aiopg/issues/535
for db_prj in db_projects:
db_prj["tags"] = await self._get_tags_by_project(
conn, project_id=db_prj["id"]
)
user_email = await self._get_user_email(conn, db_prj["prj_owner"])
api_projects.append(convert_to_schema_names(db_prj, user_email))
project_types.append(db_prj["type"])

return (api_projects, project_types)

async def _get_project(
self,
connection: SAConnection,