diff --git a/alembic/versions/0e975f452a77_step_template_table.py b/alembic/versions/0e975f452a77_step_template_table.py
index f14b5578..c61dfe95 100644
--- a/alembic/versions/0e975f452a77_step_template_table.py
+++ b/alembic/versions/0e975f452a77_step_template_table.py
@@ -1,7 +1,7 @@
 """Step template table

 Revision ID: 0e975f452a77
-Revises: ad13850a7245
+Revises: 6868ac66ea92
 Create Date: 2025-05-26 13:17:19.196643

 """
@@ -12,7 +12,7 @@

 # revision identifiers, used by Alembic.
 revision = "0e975f452a77"
-down_revision = "ad13850a7245"
+down_revision = "6868ac66ea92"
 branch_labels = None
 depends_on = None

diff --git a/alembic/versions/36f087da55b1_adds_integration_tables.py b/alembic/versions/36f087da55b1_adds_integration_tables.py
new file mode 100644
index 00000000..5966fe9a
--- /dev/null
+++ b/alembic/versions/36f087da55b1_adds_integration_tables.py
@@ -0,0 +1,503 @@
+"""adds integration tables
+
+Revision ID: 36f087da55b1
+Revises: ad13850a7245
+Create Date: 2025-06-26 12:57:31.654259
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "36f087da55b1"
+down_revision = "ad13850a7245"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.execute("create schema integration")
+    op.create_table(
+        "integration_access",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("integration_types", sa.ARRAY(sa.String()), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["organization_id"], ["organization.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_integration_access_created_by"),
+        "integration_access",
+        ["created_by"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_integration_access_organization_id"),
+        "integration_access",
+        ["organization_id"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_table(
+        "integration",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("updated_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("updated_at", sa.DateTime(), nullable=True),
+        sa.Column("started_at", sa.DateTime(), nullable=True),
+        sa.Column("finished_at", sa.DateTime(), nullable=True),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.Column("tokenizer", sa.String(), nullable=True),
+        sa.Column("state", sa.String(), nullable=True),
+        sa.Column("type", sa.String(), nullable=True),
+        sa.Column("config", sa.JSON(), nullable=True),
+        sa.Column("llm_config", sa.JSON(), nullable=True),
+        sa.Column("error_message", sa.String(), nullable=True),
+        sa.Column("is_synced", sa.Boolean(), nullable=True),
+        sa.Column("last_synced_at", sa.DateTime(), nullable=True),
+        sa.Column("delta_criteria", sa.JSON(), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["organization_id"], ["organization.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(["updated_by"], ["user.id"], ondelete="SET NULL"),
+        sa.PrimaryKeyConstraint("id"),
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_integration_created_by"),
+        "integration",
+        ["created_by"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_integration_organization_id"),
+        "integration",
+        ["organization_id"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_integration_project_id"),
+        "integration",
+        ["project_id"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_integration_updated_by"),
+        "integration",
+        ["updated_by"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_table(
+        "github_file",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("updated_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("updated_at", sa.DateTime(), nullable=True),
+        sa.Column("integration_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("running_id", sa.Integer(), nullable=True),
+        sa.Column("source", sa.String(), nullable=True),
+        sa.Column("minio_file_name", sa.String(), nullable=True),
+        sa.Column("error_message", sa.String(), nullable=True),
+        sa.Column("path", sa.String(), nullable=True),
+        sa.Column("sha", sa.String(), nullable=True),
+        sa.Column("code_language", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["integration_id"], ["cognition.integration.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["updated_by"], ["user.id"], ondelete="SET NULL"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint(
+            "integration_id", "running_id", "source", name="unique_github_file_source"
+        ),
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_file_created_by"),
+        "github_file",
+        ["created_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_file_integration_id"),
+        "github_file",
+        ["integration_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_file_running_id"),
+        "github_file",
+        ["running_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_file_source"),
+        "github_file",
+        ["source"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_file_updated_by"),
+        "github_file",
+        ["updated_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_table(
+        "github_issue",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("updated_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("updated_at", sa.DateTime(), nullable=True),
+        sa.Column("integration_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("running_id", sa.Integer(), nullable=True),
+        sa.Column("source", sa.String(), nullable=True),
+        sa.Column("minio_file_name", sa.String(), nullable=True),
+        sa.Column("error_message", sa.String(), nullable=True),
+        sa.Column("url", sa.String(), nullable=True),
+        sa.Column("state", sa.String(), nullable=True),
+        sa.Column("assignee", sa.String(), nullable=True),
+        sa.Column("milestone", sa.String(), nullable=True),
+        sa.Column("number", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["integration_id"], ["cognition.integration.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["updated_by"], ["user.id"], ondelete="SET NULL"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint(
+            "integration_id", "running_id", "source", name="unique_github_issue_source"
+        ),
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_issue_created_by"),
+        "github_issue",
+        ["created_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_issue_integration_id"),
+        "github_issue",
+        ["integration_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_issue_running_id"),
+        "github_issue",
+        ["running_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_issue_source"),
+        "github_issue",
+        ["source"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_github_issue_updated_by"),
+        "github_issue",
+        ["updated_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_table(
+        "pdf",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("updated_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("updated_at", sa.DateTime(), nullable=True),
+        sa.Column("integration_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("running_id", sa.Integer(), nullable=True),
+        sa.Column("source", sa.String(), nullable=True),
+        sa.Column("minio_file_name", sa.String(), nullable=True),
+        sa.Column("error_message", sa.String(), nullable=True),
+        sa.Column("file_path", sa.String(), nullable=True),
+        sa.Column("page", sa.Integer(), nullable=True),
+        sa.Column("total_pages", sa.Integer(), nullable=True),
+        sa.Column("title", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["integration_id"], ["cognition.integration.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["updated_by"], ["user.id"], ondelete="SET NULL"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint(
+            "integration_id", "running_id", "source", name="unique_pdf_source"
+        ),
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_pdf_created_by"),
+        "pdf",
+        ["created_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_pdf_integration_id"),
+        "pdf",
+        ["integration_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_pdf_running_id"),
+        "pdf",
+        ["running_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_pdf_source"),
+        "pdf",
+        ["source"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_pdf_updated_by"),
+        "pdf",
+        ["updated_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_table(
+        "sharepoint",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("updated_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("updated_at", sa.DateTime(), nullable=True),
+        sa.Column("integration_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("running_id", sa.Integer(), nullable=True),
+        sa.Column("source", sa.String(), nullable=True),
+        sa.Column("minio_file_name", sa.String(), nullable=True),
+        sa.Column("error_message", sa.String(), nullable=True),
+        sa.Column("extension", sa.String(), nullable=True),
+        sa.Column("object_id", sa.String(), nullable=True),
+        sa.Column("parent_path", sa.String(), nullable=True),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("web_url", sa.String(), nullable=True),
+        sa.Column("sharepoint_created_by", sa.String(), nullable=True),
+        sa.Column("modified_by", sa.String(), nullable=True),
+        sa.Column("created", sa.DateTime(), nullable=True),
+        sa.Column("modified", sa.DateTime(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.Column("size", sa.Integer(), nullable=True),
+        sa.Column("mime_type", sa.String(), nullable=True),
+        sa.Column("hashes", sa.JSON(), nullable=True),
+        sa.Column("permissions", sa.JSON(), nullable=True),
+        sa.Column("file_properties", sa.JSON(), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["integration_id"], ["cognition.integration.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["updated_by"], ["user.id"], ondelete="SET NULL"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint(
+            "integration_id", "running_id", "source", name="unique_sharepoint_source"
+        ),
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_sharepoint_created_by"),
+        "sharepoint",
+        ["created_by"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_sharepoint_integration_id"),
+        "sharepoint",
+        ["integration_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_sharepoint_running_id"),
+        "sharepoint",
+        ["running_id"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_sharepoint_source"),
+        "sharepoint",
+        ["source"],
+        unique=False,
+        schema="integration",
+    )
+    op.create_index(
+        op.f("ix_integration_sharepoint_updated_by"),
+        "sharepoint",
+        ["updated_by"],
+        unique=False,
+        schema="integration",
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(
+        op.f("ix_integration_sharepoint_updated_by"),
+        table_name="sharepoint",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_sharepoint_source"),
+        table_name="sharepoint",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_sharepoint_running_id"),
+        table_name="sharepoint",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_sharepoint_integration_id"),
+        table_name="sharepoint",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_sharepoint_created_by"),
+        table_name="sharepoint",
+        schema="integration",
+    )
+    op.drop_table("sharepoint", schema="integration")
+    op.drop_index(
+        op.f("ix_integration_pdf_updated_by"), table_name="pdf", schema="integration"
+    )
+    op.drop_index(
+        op.f("ix_integration_pdf_source"), table_name="pdf", schema="integration"
+    )
+    op.drop_index(
+        op.f("ix_integration_pdf_running_id"), table_name="pdf", schema="integration"
+    )
+    op.drop_index(
+        op.f("ix_integration_pdf_integration_id"),
+        table_name="pdf",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_pdf_created_by"), table_name="pdf", schema="integration"
+    )
+    op.drop_table("pdf", schema="integration")
+    op.drop_index(
+        op.f("ix_integration_github_issue_updated_by"),
+        table_name="github_issue",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_issue_source"),
+        table_name="github_issue",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_issue_running_id"),
+        table_name="github_issue",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_issue_integration_id"),
+        table_name="github_issue",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_issue_created_by"),
+        table_name="github_issue",
+        schema="integration",
+    )
+    op.drop_table("github_issue", schema="integration")
+    op.drop_index(
+        op.f("ix_integration_github_file_updated_by"),
+        table_name="github_file",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_file_source"),
+        table_name="github_file",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_file_running_id"),
+        table_name="github_file",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_file_integration_id"),
+        table_name="github_file",
+        schema="integration",
+    )
+    op.drop_index(
+        op.f("ix_integration_github_file_created_by"),
+        table_name="github_file",
+        schema="integration",
+    )
+    op.drop_table("github_file", schema="integration")
+    op.drop_index(
+        op.f("ix_cognition_integration_updated_by"),
+        table_name="integration",
+        schema="cognition",
+    )
+    op.drop_index(
+        op.f("ix_cognition_integration_project_id"),
+        table_name="integration",
+        schema="cognition",
+    )
+    op.drop_index(
+        op.f("ix_cognition_integration_organization_id"),
+        table_name="integration",
+        schema="cognition",
+    )
+    op.drop_index(
+        op.f("ix_cognition_integration_created_by"),
+        table_name="integration",
+        schema="cognition",
+    )
+    op.drop_table("integration", schema="cognition")
+    op.drop_index(
+        op.f("ix_cognition_integration_access_organization_id"),
+        table_name="integration_access",
+        schema="cognition",
+    )
+    op.drop_index(
+        op.f("ix_cognition_integration_access_created_by"),
+        table_name="integration_access",
+        schema="cognition",
+    )
+    op.drop_table("integration_access", schema="cognition")
+    # ### end Alembic commands ###
+    op.execute("drop schema integration")
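Note: the two new revisions splice into the existing history as ad13850a7245 -> 36f087da55b1 -> 6868ac66ea92 -> 0e975f452a77 (the step-template revision above is re-parented onto the group-management revision). A minimal sketch for sanity-checking the re-parented chain, assuming a standard alembic.ini at the repo root:

    from alembic.config import Config
    from alembic.script import ScriptDirectory

    script = ScriptDirectory.from_config(Config("alembic.ini"))
    # walk_revisions yields newest-first; the four revisions should appear in order
    chain = [rev.revision for rev in script.walk_revisions()]
    idx = {rev: i for i, rev in enumerate(chain)}
    assert idx["0e975f452a77"] < idx["6868ac66ea92"] < idx["36f087da55b1"] < idx["ad13850a7245"]
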
diff --git a/alembic/versions/6868ac66ea92_adds_cognition_group_management.py b/alembic/versions/6868ac66ea92_adds_cognition_group_management.py
new file mode 100644
index 00000000..e32235c9
--- /dev/null
+++ b/alembic/versions/6868ac66ea92_adds_cognition_group_management.py
@@ -0,0 +1,106 @@
+"""adds cognition group management
+
+Revision ID: 6868ac66ea92
+Revises: 36f087da55b1
+Create Date: 2025-06-26 12:58:16.408919
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "6868ac66ea92"
+down_revision = "36f087da55b1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "group",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.Column("created_by", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("meta_data", sa.JSON(), nullable=True),
+        sa.ForeignKeyConstraint(["created_by"], ["user.id"], ondelete="SET NULL"),
+        sa.ForeignKeyConstraint(
+            ["organization_id"], ["organization.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_group_created_by"),
+        "group",
+        ["created_by"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_group_organization_id"),
+        "group",
+        ["organization_id"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_table(
+        "group_member",
+        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column("group_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=True),
+        sa.Column("created_at", sa.DateTime(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["group_id"], ["cognition.group.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_group_member_group_id"),
+        "group_member",
+        ["group_id"],
+        unique=False,
+        schema="cognition",
+    )
+    op.create_index(
+        op.f("ix_cognition_group_member_user_id"),
+        "group_member",
+        ["user_id"],
+        unique=False,
+        schema="cognition",
+    )
+    op.add_column("user", sa.Column("oidc_identifier", sa.String(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("user", "oidc_identifier")
+    op.drop_index(
+        op.f("ix_cognition_group_member_user_id"),
+        table_name="group_member",
+        schema="cognition",
+    )
+    op.drop_index(
+        op.f("ix_cognition_group_member_group_id"),
+        table_name="group_member",
+        schema="cognition",
+    )
+    op.drop_table("group_member", schema="cognition")
+    op.drop_index(
+        op.f("ix_cognition_group_organization_id"),
+        table_name="group",
+        schema="cognition",
+    )
+    op.drop_index(
+        op.f("ix_cognition_group_created_by"), table_name="group", schema="cognition"
+    )
+    op.drop_table("group", schema="cognition")
+    # ### end Alembic commands ###
diff --git a/api/transfer.py b/api/transfer.py
index d80b2b1c..1baf4b39 100644
--- a/api/transfer.py
+++ b/api/transfer.py
@@ -1,6 +1,7 @@
 import logging
 import traceback
 import time
+import os
 from typing import Optional
 from starlette.endpoints import HTTPEndpoint
 from starlette.responses import PlainTextResponse
@@ -16,6 +17,8 @@
     tokenization,
     project,
 )
+from submodules.model.cognition_objects import integration
+from util import service_requests
 from controller.transfer import manager as transfer_manager
 from controller.upload_task import manager as upload_task_manager
@@ -36,6 +39,10 @@
 logging.basicConfig(level=logging.DEBUG)
 logger = logging.getLogger(__name__)

+COGNITION_INTEGRATION_PROVIDER = os.getenv(
+    "COGNITION_INTEGRATION_PROVIDER", "http://cognition-integration-provider:80"
+)
+

 class Notify(HTTPEndpoint):
     async def post(self, request) -> PlainTextResponse:
@@ -165,12 +172,22 @@
 def __recalculate_missing_attributes_and_embeddings(
     project_id: str, user_id: str
 ) -> None:
     __calculate_missing_attributes(project_id, user_id)
-    recreate_or_extend_embeddings(project_id)
+    has_embeddings = recreate_or_extend_embeddings(project_id)
+    if not has_embeddings:
+        return
+
+    integration_items = integration.get_all_by_project_id(project_id)
+    if not integration_items:
+        return
+
+    for integration_item in integration_items:
+        integration_id = str(integration_item.id)
+        url = f"{COGNITION_INTEGRATION_PROVIDER}/integrations/postprocess/{integration_id}"
+        service_requests.post_call_or_raise(url, data=None)


 def __calculate_missing_attributes(project_id: str, user_id: str) -> None:
-    # wait a second to ensure that the process is started in the tokenization service
-    time.sleep(5)
     attributes_usable = attribute.get_all_ordered(
         project_id,
         True,
@@ -186,6 +203,7 @@ def __calculate_missing_attributes(project_id: str, user_id: str) -> None:
     for att_id in attribute_ids:
         attribute.update(project_id, att_id, state=enums.AttributeState.INITIAL.value)
     general.commit()
+    # wait a moment to ensure that the process is started in the tokenization service
+    time.sleep(5)
     notification.send_organization_update(
         project_id=project_id, message="calculate_attribute:started:all"
     )
@@ -196,7 +214,7 @@
         i += 1
         if i >= 60:
             i = 0
-            daemon.reset_session_token_in_thread()
+            general.remove_and_refresh_session(request_new=True)
         if tokenization.is_doc_bin_creation_running_or_queued(project_id):
             time.sleep(2)
             continue
@@ -211,7 +229,7 @@
             break
         if i >= 60:
             i = 0
-            daemon.reset_session_token_in_thread()
+            general.remove_and_refresh_session(request_new=True)

         current_att_id = attribute_ids[0]
         current_att = attribute.get(project_id, current_att_id)
diff --git a/app.py b/app.py
index d64c37ed..a0130e56 100644
--- a/app.py
+++ b/app.py
@@ -16,6 +16,7 @@
 from fast_api.routes.organization import router as org_router
 from fast_api.routes.project import router as project_router
 from fast_api.routes.project_setting import router as project_setting_router
+from fast_api.routes.project_internal import router as project_internal_router
 from fast_api.routes.misc import router as misc_router
 from fast_api.routes.comment import router as comment_router
 from fast_api.routes.attribute import router as attribute_router
@@ -43,6 +44,7 @@
     PREFIX_ORGANIZATION,
     PREFIX_PROJECT,
     PREFIX_PROJECT_SETTING,
+    PREFIX_PROJECT_INTERNAL,
     PREFIX_MISC,
     PREFIX_COMMENT,
     PREFIX_ATTRIBUTE,
@@ -121,6 +123,9 @@
 fastapi_app_internal.include_router(
     record_internal_router, prefix=PREFIX_RECORD_INTERNAL, tags=["record-internal"]
 )
+fastapi_app_internal.include_router(
+    project_internal_router, prefix=PREFIX_PROJECT_INTERNAL, tags=["project-internal"]
+)

 routes = [
diff --git a/controller/auth/kratos.py b/controller/auth/kratos.py
index 22e51c1d..178b29bb 100644
--- a/controller/auth/kratos.py
+++ b/controller/auth/kratos.py
@@ -136,7 +136,7 @@ def get_userid_from_mail(user_mail: str) -> str:
         if values[key]["simple"]["mail"] == user_mail:
             return key
     # not in cached values, try search kratos
-    return __search_kratos_for_user_mail(user_mail)
+    if identity := __search_kratos_for_user_mail(user_mail):
+        return identity["id"]
+    return None


 def __search_kratos_for_user_mail(user_mail: str) -> str:
@@ -147,7 +147,7 @@ def __search_kratos_for_user_mail(user_mail: str) -> str:
     identities = request.json()
     for i in identities:
         if i["traits"]["email"].lower() == user_mail.lower():
-            return i["id"]
+            return i
     return None
diff --git a/controller/auth/manager.py b/controller/auth/manager.py
index dd0df0c9..e7a1c31b 100644
--- a/controller/auth/manager.py
+++ b/controller/auth/manager.py
@@ -208,3 +208,11 @@ def check_valid_emails(emails: List[str]):

 def is_valid_email(email: str) -> bool:
     return bool(EMAIL_RE.fullmatch(email))
+
+
+def check_group_auth(request: Request):
+    user_item = get_user_by_info(request.state.info)
+    if not user_item:
+        raise AuthManagerError(status_code=404, detail="User not found")
+    if user_item.role != enums.UserRoles.ENGINEER.value:
+        raise AuthManagerError(status_code=403, detail="User not authorized")
diff --git a/controller/embedding/manager.py b/controller/embedding/manager.py
index 4a554ef5..f15cf777 100644
--- a/controller/embedding/manager.py
+++ b/controller/embedding/manager.py
@@ -102,13 +102,16 @@ def get_embedding_name(
 def recreate_or_extend_embeddings(
     project_id: str, embedding_ids: Optional[List[str]] = None, user_id: str = None
-) -> None:
+) -> bool:
     if not embedding_ids:
         embeddings = embedding.get_all_embeddings_by_project_id(project_id)
         if len(embeddings) == 0:
-            return
+            return False
         embedding_ids = [str(embed.id) for embed in embeddings]

+    if len(embedding_ids) == 0:
+        return False
+
     set_to_wait = False
     for embedding_id in embedding_ids:
         set_to_wait = True
@@ -157,6 +160,7 @@
     notification.send_organization_update(
         project_id=project_id, message="embedding:finished:all"
     )
+    return True


 def __handle_failed_embedding(
diff --git a/controller/monitor/manager.py b/controller/monitor/manager.py
index 289f65ce..f4bdd43b 100644
--- a/controller/monitor/manager.py
+++ b/controller/monitor/manager.py
@@ -115,3 +115,14 @@
         transformation_key,
         with_commit=True,
     )
+
+
+def cancel_integration_task(
+    task_info: Dict[str, Any],
+) -> None:
+    integration_id = task_info.get("integrationId")
+    task_monitor.set_integration_task_to_failed(
+        integration_id, error_message="Cancelled by task manager"
+    )
diff --git a/controller/project/manager.py b/controller/project/manager.py
index ff440877..78c91c1f 100644
--- a/controller/project/manager.py
+++ b/controller/project/manager.py
@@ -2,7 +2,7 @@
 import os
 import shutil
 import time
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Any

 from controller.transfer import project_transfer_manager as handler
 from controller.labeling_access_link import manager as link_manager
@@ -15,16 +15,24 @@
     data_slice,
     information_source,
     general,
+    attribute,
+    embedding,
 )
 from submodules.model import daemon
 from fast_api.types import HuddleData, ProjectSize
 from controller.task_master import manager as task_master_manager
 from submodules.model.enums import TaskType, RecordTokenizationScope
 from submodules.model.business_objects import util as db_util
+from submodules.model.integration_objects.helper import (
+    REFINERY_ATTRIBUTE_ACCESS_GROUPS,
+    REFINERY_ATTRIBUTE_ACCESS_USERS,
+)
 from submodules.s3 import controller as s3
 from service.search import search
 from controller.auth import kratos
 from submodules.model.util import sql_alchemy_to_dict
+from controller.embedding import connector
+

 ALL_PROJECTS_WHITELIST = {
     "id",
@@ -53,6 +61,78 @@
 def get_all_projects(organization_id: str) -> List[Project]:
     return project.get_all(organization_id)


+def get_all_projects_with_access_management(
+    organization_id: str,
+) -> List[Dict[str, Any]]:
+    all_projects = project.get_all_with_access_management(organization_id)
+    all_projects_dict = sql_alchemy_to_dict(all_projects)
+    return all_projects_dict
+
+
+def activate_access_management(project_id: str) -> None:
+    relative_position = attribute.get_relative_position(project_id)
+    if relative_position is None:
+        relative_position = 1
+    else:
+        relative_position += 1
+    filter_attributes = [
+        REFINERY_ATTRIBUTE_ACCESS_GROUPS,
+        REFINERY_ATTRIBUTE_ACCESS_USERS,
+    ]
+    attribute.create(
+        project_id=project_id,
+        relative_position=relative_position,
+        name=filter_attributes[0],
+        data_type=enums.DataTypes.PERMISSION.value,
+        user_created=False,
+        visibility=enums.AttributeVisibility.HIDE.value,
+        with_commit=True,
+        state=enums.AttributeState.AUTOMATICALLY_CREATED.value,
+    )
+    attribute.create(
+        project_id=project_id,
+        relative_position=relative_position + 1,
+        name=filter_attributes[1],
+        data_type=enums.DataTypes.PERMISSION.value,
+        user_created=False,
+        visibility=enums.AttributeVisibility.HIDE.value,
+        with_commit=True,
+        state=enums.AttributeState.AUTOMATICALLY_CREATED.value,
+    )
+    all_embeddings = embedding.get_all_embeddings_by_project_id(project_id)
+    for embedding_item in all_embeddings:
+        prev_filter_attributes = embedding_item.filter_attributes or []
+        new_filter_attributes = list(set(prev_filter_attributes + filter_attributes))
+        embedding_item.filter_attributes = new_filter_attributes
+        if connector.update_attribute_payloads_for_neural_search(
+            project_id, str(embedding_item.id)
+        ):
+            embedding.update_embedding_filter_attributes(
+                project_id, str(embedding_item.id), new_filter_attributes
+            )
+    general.commit()
+
+
+def deactivate_access_management(project_id: str) -> None:
+    record.delete_access_management_attributes(project_id)
+    access_groups_attribute = attribute.get_by_name(
+        project_id, REFINERY_ATTRIBUTE_ACCESS_GROUPS
+    )
+    access_users_attribute = attribute.get_by_name(
+        project_id, REFINERY_ATTRIBUTE_ACCESS_USERS
+    )
+    if access_groups_attribute:
+        attribute.delete(project_id, access_groups_attribute.id, with_commit=True)
+    if access_users_attribute:
+        attribute.delete(project_id, access_users_attribute.id, with_commit=True)
+
+
+def is_access_management_activated(project_id: str) -> bool:
+    access_groups = attribute.get_by_name(project_id, REFINERY_ATTRIBUTE_ACCESS_GROUPS)
+    access_users = attribute.get_by_name(project_id, REFINERY_ATTRIBUTE_ACCESS_USERS)
+    return access_groups is not None and access_users is not None
+
+
 def get_all_projects_by_user(organization_id) -> List[Project]:
     projects = project.get_all_by_user_organization_id(organization_id)
     project_dicts = sql_alchemy_to_dict(
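Note: activation adds two hidden PERMISSION attributes to every record and registers them as filter attributes on each embedding, so access checks can run inside the neural-search payload filter instead of in application code. Illustrative record data after activation (values hypothetical; the key names come from the constants in submodules.model.integration_objects.helper):

    record_data = {
        "headline": "Quarterly report",                       # regular, user-visible attribute
        REFINERY_ATTRIBUTE_ACCESS_GROUPS: ["<group-uuid>"],   # hidden permission attribute
        REFINERY_ATTRIBUTE_ACCESS_USERS: ["<user-uuid>"],     # hidden permission attribute
    }
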
diff --git a/controller/record/manager.py b/controller/record/manager.py
index 857ed576..1798d990 100644
--- a/controller/record/manager.py
+++ b/controller/record/manager.py
@@ -13,6 +13,11 @@
     task_queue,
     record_label_association,
     comments,
+    project,
+)
+from submodules.model.integration_objects.helper import (
+    REFINERY_ATTRIBUTE_ACCESS_GROUPS,
+    REFINERY_ATTRIBUTE_ACCESS_USERS,
 )
 from service.search import search
 from submodules.model import enums
@@ -29,6 +34,9 @@
 from util import notification
 import time
 import traceback
+from controller.embedding import connector
+from submodules.model.cognition_objects import group as group_db
+import json


 def get_record(project_id: str, record_id: str) -> Record:
@@ -138,7 +146,10 @@ def get_unique_values_by_attributes(project_id: str) -> Dict[str, List[str]]:


 def edit_records(
-    user_id: str, project_id: str, changes: Dict[str, Any]
+    user_id: str,
+    project_id: str,
+    changes: Dict[str, Any],
+    only_access_management_update: Optional[bool] = False,
 ) -> Optional[List[str]]:
     prepped = __check_and_prep_edit_records(project_id, changes)
     if "errors_found" in prepped:
@@ -164,22 +175,30 @@ def edit_records(
         record_label_association.delete_by_record_attribute_tuples(project_id, chunk)
         general.commit()

-    try:
-        # tokenization currently with a complete rebuild of the docbins of touched records
-        # optimization possible by only rebuilding the changed record & attribute combinations and reuploading
-        tokenization.delete_record_docbins_by_id(project_id, records.keys(), True)
-        tokenization.delete_token_statistics_by_id(project_id, records.keys(), True)
-        tokenization_service.request_tokenize_project(project_id, user_id)
-        time.sleep(1)
-        # wait for tokenization to finish, the endpoint itself handles missing docbins
-        while tokenization.is_doc_bin_creation_running_or_queued(project_id):
-            time.sleep(0.5)
-
-    except Exception:
-        __revert_record_data_changes(records, prepped["record_data_backup"])
-        print(traceback.format_exc(), flush=True)
-        return ["tokenization failed"]
+    # TODO check if this is still needed for access management updates
+    if not only_access_management_update:
+        try:
+            # tokenization currently with a complete rebuild of the docbins of touched records
+            # optimization possible by only rebuilding the changed record & attribute combinations and reuploading
+            tokenization.delete_record_docbins_by_id(project_id, records.keys(), True)
+            tokenization.delete_token_statistics_by_id(project_id, records.keys(), True)
+            tokenization_service.request_tokenize_project(project_id, user_id)
+            time.sleep(1)
+            c = 0
+            # wait for tokenization to finish, the endpoint itself handles missing docbins
+            while tokenization.is_doc_bin_creation_running_or_queued(project_id):
+                c += 1
+                if c > 7200:
+                    # fail-safe (max ~1 h of 0.5 s polls) to prevent an infinite loop
+                    raise RuntimeError(
+                        "Tokenization did not finish within the fail-safe window."
+                    )
+                time.sleep(0.5)
+
+        except Exception:
+            __revert_record_data_changes(records, prepped["record_data_backup"])
+            print(traceback.format_exc(), flush=True)
+            return ["tokenization failed"]

     try:
         embedding_connector.request_re_embed_records(
@@ -191,8 +210,6 @@ def edit_records(
         print(traceback.format_exc(), flush=True)
         return ["embedding failed"]

-    return None
-

 def __revert_record_data_changes(
     records: Dict[str, Record], data_backup: Dict[str, Any]
@@ -331,6 +348,143 @@ def delete_records(
     __delete_records(project_id, record_ids)


+def sync_access_groups_and_users_sharepoint(
+    project_id: str,
+    integration_id: str,
+    permissions_users: Dict[str, Any],
+    record_ids: Optional[List[str]],
+) -> Optional[List[str]]:
+    try:
+        if record_ids:
+            project_records = record.get_by_record_ids(project_id, record_ids)
+        else:
+            project_records = record.get_all(project_id)
+        organization_id = project.get_org_id(project_id)
+        integration_groups_by_permission_id = (
+            group_db.get_all_by_integration_id_permission_grouped(
+                organization_id, integration_id
+            )
+        )
+        record_change_dict = {}
+        for record_item in project_records:
+            if not record_item.data.get(REFINERY_ATTRIBUTE_ACCESS_GROUPS):
+                current_group_ids = []
+            else:
+                current_group_ids = record_item.data[REFINERY_ATTRIBUTE_ACCESS_GROUPS]
+
+            meta_data_dict = json.loads(record_item.data.get("metadata", "{}"))
+            permission_ids = meta_data_dict.get("permissions") or []
+            new_group_ids = [
+                str(integration_groups_by_permission_id.get(permission_id).id)
+                for permission_id in permission_ids
+                if integration_groups_by_permission_id.get(permission_id)
+            ]
+            # Only update if new group ids differ from current group ids
+            if set(new_group_ids).difference(current_group_ids):
+                record_change_dict[
+                    f"{str(record_item.id)}@{REFINERY_ATTRIBUTE_ACCESS_GROUPS}"
+                ] = {
+                    "attributeName": REFINERY_ATTRIBUTE_ACCESS_GROUPS,
+                    "newValue": new_group_ids,
+                    "recordId": str(record_item.id),
+                }
+            if not record_item.data.get(REFINERY_ATTRIBUTE_ACCESS_USERS):
+                current_user_ids = []
+            else:
+                current_user_ids = record_item.data[REFINERY_ATTRIBUTE_ACCESS_USERS]
+            new_user_ids = [
+                user_id
+                for permission_id in permission_ids
+                if permissions_users.get(permission_id)
+                for user_id in permissions_users.get(permission_id)
+            ]
+            # Only update if new user ids differ from current user ids
+            if set(new_user_ids).difference(current_user_ids):
+                record_change_dict[
+                    f"{str(record_item.id)}@{REFINERY_ATTRIBUTE_ACCESS_USERS}"
+                ] = {
+                    "attributeName": REFINERY_ATTRIBUTE_ACCESS_USERS,
+                    "newValue": new_user_ids,
+                    "recordId": str(record_item.id),
+                }
+        changed_records_ids = [
+            record_change_dict[key]["recordId"] for key in record_change_dict
+        ]
+        partial_update = len(changed_records_ids) < len(project_records)
+        errors = edit_records(None, project_id, record_change_dict, True)
+        if not errors:
+            all_embeddings = embedding.get_all_embeddings_by_project_id(project_id)
+            for embedding_item in all_embeddings:
+                connector.update_attribute_payloads_for_neural_search(
+                    project_id,
+                    str(embedding_item.id),
+                    record_ids=changed_records_ids if partial_update else None,
+                )
+        return errors
+
+    except Exception as e:
+        print(traceback.format_exc(), flush=True)
+        return [str(e)]
+
+
+def add_access_groups_or_users(
+    project_id: str,
+    record_ids: List[str],
+    group_ids: Optional[List[str]] = None,
+    user_ids: Optional[List[str]] = None,
+) -> Optional[List[str]]:
+    try:
+        if not record_ids or len(record_ids) == 0:
+            return
+        record_change_dict = {}
+        records_to_change = record.get_by_record_ids(project_id, record_ids)
+        if group_ids and len(group_ids) > 0:
+            for record_item in records_to_change:
+                if not record_item.data.get(REFINERY_ATTRIBUTE_ACCESS_GROUPS):
+                    current_group_ids = []
+                else:
+                    current_group_ids = record_item.data[
+                        REFINERY_ATTRIBUTE_ACCESS_GROUPS
+                    ]
+                extended_group_ids = list(
+                    set(current_group_ids).union(group_ids)
+                )  # remove duplicates
+                record_change_dict[
+                    f"{str(record_item.id)}@{REFINERY_ATTRIBUTE_ACCESS_GROUPS}"
+                ] = {
+                    "attributeName": REFINERY_ATTRIBUTE_ACCESS_GROUPS,
+                    "newValue": extended_group_ids,
+                    "recordId": str(record_item.id),
+                }
+        if user_ids and len(user_ids) > 0:
+            for record_item in records_to_change:
+                if not record_item.data.get(REFINERY_ATTRIBUTE_ACCESS_USERS):
+                    current_user_ids = []
+                else:
+                    current_user_ids = record_item.data[REFINERY_ATTRIBUTE_ACCESS_USERS]
+                extended_user_ids = list(set(current_user_ids).union(user_ids))
+                record_change_dict[
+                    f"{str(record_item.id)}@{REFINERY_ATTRIBUTE_ACCESS_USERS}"
+                ] = {
+                    "attributeName": REFINERY_ATTRIBUTE_ACCESS_USERS,
+                    "newValue": extended_user_ids,
+                    "recordId": str(record_item.id),
+                }
+        # maybe wait for embedding to finish first?
+        errors = edit_records(None, project_id, record_change_dict, True)
+        if not errors:
+            all_embeddings = embedding.get_all_embeddings_by_project_id(project_id)
+            for embedding_item in all_embeddings:
+                connector.update_attribute_payloads_for_neural_search(
+                    project_id, str(embedding_item.id)
+                )
+        return errors
+    except Exception as e:
+        print(traceback.format_exc(), flush=True)
+        return [str(e)]
+
+
 def __delete_records(project_id: str, record_ids: List[str]) -> None:
     try:
         row_count = record.delete_many(project_id, record_ids)
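Note: both new helpers feed edit_records with a change dict keyed by "<record_id>@<attribute_name>", passing only_access_management_update=True so the expensive docbin rebuild is skipped. A minimal sketch of one entry (UUIDs hypothetical):

    record_change_dict = {
        f"{record_id}@{REFINERY_ATTRIBUTE_ACCESS_GROUPS}": {
            "attributeName": REFINERY_ATTRIBUTE_ACCESS_GROUPS,
            "newValue": ["<group-uuid-1>", "<group-uuid-2>"],
            "recordId": record_id,
        }
    }
    # user_id is None here because access syncs are system-initiated
    errors = edit_records(None, project_id, record_change_dict, True)
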
diff --git a/controller/transfer/checks.py b/controller/transfer/checks.py
index bff43101..af5e47b9 100644
--- a/controller/transfer/checks.py
+++ b/controller/transfer/checks.py
@@ -70,13 +70,12 @@ def run_checks(df: pd.DataFrame, project_id, user_id) -> None:
     # check attribute equality
     attribute_entities = attribute.get_all(
         project_id,
-        state_filter=[
-            AttributeState.UPLOADED.value,
-            AttributeState.AUTOMATICALLY_CREATED.value,
-        ],
+        state_filter=[AttributeState.UPLOADED.value],
     )
-    attribute_names = [attribute_item.name for attribute_item in attribute_entities]
-    differences = set(attribute_names).difference(set(attributes))
+    attribute_names = [attribute_item.name for attribute_item in attribute_entities] + [
+        attribute.get_running_id_name(project_id)
+    ]
+    differences = set(filter(None, attribute_names)).difference(set(attributes))
     if differences:
         guard = True
diff --git a/controller/user/manager.py b/controller/user/manager.py
index a00a7b22..5d1af799 100644
--- a/controller/user/manager.py
+++ b/controller/user/manager.py
@@ -162,4 +162,22 @@ def __migrate_kratos_users():
         if user_database.sso_provider != sso_provider:
             user_database.sso_provider = sso_provider

+        if user_database.oidc_identifier is None:
+            user_search = kratos.__search_kratos_for_user_mail(
+                user_identity["traits"]["email"]
+            )
+            if user_search and user_search.get("credentials", {}).get("oidc"):
+                identifiers = (
+                    user_search["credentials"]["oidc"].get("identifiers") or []
+                )
+                if identifiers:
+                    # identifiers look like "<provider>:<subject>"
+                    oidc = identifiers[0].split(":")
+                    if len(oidc) > 1:
+                        user_database.oidc_identifier = oidc[1]
+
     general.commit()
diff --git a/fast_api/models.py b/fast_api/models.py
index b8fd13db..bd667a9b 100644
--- a/fast_api/models.py
+++ b/fast_api/models.py
@@ -514,3 +514,17 @@ class AdminQueryFilterBody(BaseModel):

 class RecordDeletion(BaseModel):
     record_ids: List[str]
+
+
+class ProjectDeletion(BaseModel):
+    user_id: str
+    project_ids: List[str]
+
+
+class GetEmbeddingNameBody(BaseModel):
+    org_id: str
+    attribute_id: str
+    platform: str
+    embedding_type: str
+    model: Optional[str] = None
+    api_token_env_name: Optional[str] = None
diff --git a/fast_api/routes/embedding.py b/fast_api/routes/embedding.py
index c18bb41f..d482adec 100644
--- a/fast_api/routes/embedding.py
+++ b/fast_api/routes/embedding.py
@@ -1,6 +1,10 @@
 from typing import List, Optional

-from fast_api.models import CreateEmbeddingBody, UpdateEmbeddingBody
+from fast_api.models import (
+    CreateEmbeddingBody,
+    UpdateEmbeddingBody,
+    GetEmbeddingNameBody,
+)
 from fast_api.routes.client_response import (
     pack_json_result,
     get_silent_success,
@@ -13,6 +17,7 @@
 from controller.embedding.connector import collection_on_qdrant
 from submodules.model.enums import TaskType
 from submodules.model.business_objects import embedding
+from submodules.model.cognition_objects import environment_variable as env_var_db_bo
 from submodules.model.util import sql_alchemy_to_dict
 from util import notification, spacy_util
 import json
@@ -169,3 +174,23 @@ def update_embedding_payload(
         return get_silent_success()
     else:
         return GENERIC_FAILURE_RESPONSE
+
+
+# should really be a GET, but is only used internally by cognition-integration-provider
+@router.post("/{project_id}/embedding-name")
+def get_embedding_name(
+    project_id: str,
+    data: GetEmbeddingNameBody = Body(...),
+) -> str:
+    api_token = None
+    if data.api_token_env_name:
+        api_token = env_var_db_bo.get_by_name_and_org_id(
+            data.org_id, data.api_token_env_name
+        ).value
+    return manager.get_embedding_name(
+        project_id,
+        data.attribute_id,
+        data.platform,
+        data.embedding_type,
+        data.model,
+        api_token,
+    )
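Note: a sketch of how another service might call the internal embedding-name endpoint with a GetEmbeddingNameBody payload (all values and the GATEWAY_URL/mount prefix are hypothetical and deployment-specific):

    import requests

    body = {
        "org_id": "<org-uuid>",
        "attribute_id": "<attribute-uuid>",
        "platform": "<platform>",            # one of the supported embedder platforms
        "embedding_type": "<embedding-type>",
        "model": "<model-name>",             # optional
        "api_token_env_name": None,          # optional; resolved against org env vars
    }
    name = requests.post(
        f"{GATEWAY_URL}/embedding/{project_id}/embedding-name", json=body
    ).text
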
diff --git a/fast_api/routes/misc.py b/fast_api/routes/misc.py
index de02b54a..9ad178c8 100644
--- a/fast_api/routes/misc.py
+++ b/fast_api/routes/misc.py
@@ -134,6 +134,8 @@ def cancel_task(
         controller_manager.cancel_parse_cognition_file_task(
             task_entity.organization_id, task_info
         )
+    elif task_type == enums.TaskType.EXECUTE_INTEGRATION.value:
+        controller_manager.cancel_integration_task(task_info)
     else:
         raise ValueError(f"{task_type} is no valid task type")
diff --git a/fast_api/routes/project.py b/fast_api/routes/project.py
index fc7fac5f..0719dbd7 100644
--- a/fast_api/routes/project.py
+++ b/fast_api/routes/project.py
@@ -9,7 +9,6 @@
 )
 from fastapi import APIRouter, Body, Depends, Request
 from fast_api.routes.client_response import get_silent_success, pack_json_result
-from typing import Dict
 from controller.auth import manager as auth_manager
 from controller.upload_task import manager as upload_task_manager
 from submodules.model.business_objects import labeling_task
@@ -55,23 +54,64 @@
     "/{project_id}/project-by-project-id",
     dependencies=[Depends(auth_manager.check_project_access_dep)],
 )
-def get_project_by_project_id(
-    project_id: str,
-) -> Dict:
+def get_project_by_project_id(project_id: str):
     data = get_project_by_project_id_sql(project_id)
     return pack_json_result(data)


 @router.get("/all-projects")
-def get_all_projects(request: Request) -> Dict:
+def get_all_projects(request: Request):
     projects = manager.get_all_projects_by_user(
         auth_manager.get_organization_id_by_info(request.state.info)
     )
     return pack_json_result(projects)


+@router.get("/all-projects-with-access-management")
+def get_all_projects_with_access_management(request: Request):
+    org_id = auth_manager.get_organization_id_by_info(request.state.info)
+    projects_with_access_management = manager.get_all_projects_with_access_management(
+        org_id
+    )
+    return pack_json_result(projects_with_access_management)
+
+
+@router.post(
+    "/{project_id}/access-management",
+    dependencies=[
+        Depends(auth_manager.check_project_access_dep),
+        Depends(auth_manager.check_group_auth),
+    ],
+)
+def activate_access_management(
+    request: Request,
+    project_id: str,
+):
+    if manager.is_access_management_activated(project_id):
+        return get_silent_success()
+    manager.activate_access_management(project_id)
+    return get_silent_success()
+
+
+@router.delete(
+    "/{project_id}/access-management",
+    dependencies=[
+        Depends(auth_manager.check_project_access_dep),
+        Depends(auth_manager.check_group_auth),
+    ],
+)
+def deactivate_access_management(
+    request: Request,
+    project_id: str,
+):
+    if not manager.is_access_management_activated(project_id):
+        return get_silent_success()
+    manager.deactivate_access_management(project_id)
+    return get_silent_success()
+
+
 @router.get("/all-projects-mini")
-def get_all_projects_mini(request: Request) -> Dict:
+def get_all_projects_mini(request: Request):
     projects = manager.get_all_projects_by_user(
         auth_manager.get_organization_id_by_info(request.state.info)
     )
@@ -85,7 +125,6 @@ def get_all_projects_mini(request: Request):
         }
         for project in projects
     ]
-
     return pack_json_result(project_extended)


@@ -97,7 +136,7 @@ def general_project_stats(
     project_id: str,
     labeling_task_id: Optional[str] = None,
     slice_id: Optional[str] = None,
-) -> Dict:
+):
     data = manager.get_general_project_stats(project_id, labeling_task_id, slice_id)
     return pack_json_result(
         data,
@@ -113,7 +152,7 @@ def label_distribution(
     project_id: str,
     labeling_task_id: Optional[str] = None,
     slice_id: Optional[str] = None,
-) -> str:
+):
     data = manager.get_label_distribution(project_id, labeling_task_id, slice_id)
     return pack_json_result(
         data,
@@ -125,7 +164,7 @@
     "/{project_id}/project-tokenization",
     dependencies=[Depends(auth_manager.check_project_access_dep)],
 )
-def project_tokenization(project_id: str) -> str:
+def project_tokenization(project_id: str):
     waiting_task = task_queue.get_by_tokenization(project_id)
     data = None
     if waiting_task and not waiting_task.is_active:
@@ -150,7 +189,7 @@
     "/{project_id}/labeling-tasks-by-project-id",
     dependencies=[Depends(auth_manager.check_project_access_dep)],
 )
-def labeling_tasks_by_project_id(project_id: str) -> str:
+def labeling_tasks_by_project_id(project_id: str):
     data = labeling_task.get_labeling_tasks_by_project_id_full(project_id)
     return pack_json_result(data)

@@ -159,13 +198,13 @@
     "/{project_id}/record-export-by-project-id",
     dependencies=[Depends(auth_manager.check_project_access_dep)],
 )
-def record_export_by_project_id(project_id: str) -> str:
+def record_export_by_project_id(project_id: str):
     data = manager.get_project_with_labeling_tasks_info_attributes(project_id)
     return pack_json_result(data)


 @router.get("/model-provider-info")
-def get_model_provider_info(request: Request) -> Dict:
+def get_model_provider_info(request: Request):
     data = model_manager.get_model_provider_info()
     return pack_json_result(data)

@@ -177,7 +216,7 @@
 def last_export_credentials(
     request: Request,
     project_id: str,
-) -> Dict:
+):
     data = transfer_manager.last_project_export_credentials(project_id)
     return pack_json_result(data)

@@ -213,7 +252,7 @@ def upload_task_by_id(
     request: Request,
     project_id: str,
     upload_task_id: str,
-) -> Dict:
+):
     if upload_task_id.find("/") != -1:
         upload_task_id = upload_task_id.split("/")[-1]
     data = upload_task_manager.get_upload_task(project_id, upload_task_id)
diff --git a/fast_api/routes/project_internal.py b/fast_api/routes/project_internal.py
new file mode 100644
index 00000000..4f85fe5d
--- /dev/null
+++ b/fast_api/routes/project_internal.py
@@ -0,0 +1,34 @@
+from fastapi import APIRouter, Request
+
+from controller.project import manager
+from fast_api.models import ProjectDeletion
+from fast_api.routes.client_response import get_silent_success
+from util import notification
+
+from submodules.model import enums
+from submodules.model.business_objects import (
+    notification as notification_model,
+    user as user_db_bo,
+)
+
+router = APIRouter()
+
+
+@router.delete(
+    "/delete-projects",
+)
+def delete_projects(request: Request, data: ProjectDeletion):
+    user = user_db_bo.get(data.user_id)
+    for project_id in data.project_ids:
+        manager.update_project(
+            project_id, status=enums.ProjectStatus.IN_DELETION.value
+        )
+        project_item = manager.get_project(project_id)
+        organization_id = str(project_item.organization_id)
+        notification.create_notification(
+            enums.NotificationType.PROJECT_DELETED, user.id, None, project_item.name
+        )
+        notification_model.remove_project_connection_for_last_x(project_id)
+        manager.delete_project(project_id)
+        notification.send_organization_update(
+            project_id, f"project_deleted:{project_id}:{user.id}", True, organization_id
+        )
+    return get_silent_success()
diff --git a/fast_api/routes/record.py b/fast_api/routes/record.py
index 4c2ce276..4758fe93 100644
--- a/fast_api/routes/record.py
+++ b/fast_api/routes/record.py
@@ -47,3 +47,29 @@ def delete_by_record_ids(
 ):
     manager.delete_records(project_id, body.record_ids, as_thread)
     return get_silent_success()
+
+
+@router.post(
+    "/{project_id}/access-management",
+    dependencies=[
+        Depends(auth_manager.check_project_access_dep),
+        Depends(auth_manager.check_group_auth),
+    ],
+)
+def add_access_groups_or_users(
+    request: Request,
+    project_id: str,
+    body: dict = Body(...),
+):
+    group_ids = body.get("group_ids")
+    user_ids = body.get("user_ids")
+    record_ids = body.get("record_ids")
+    errors = manager.add_access_groups_or_users(
+        project_id, record_ids, group_ids=group_ids, user_ids=user_ids
+    )
+    if errors and len(errors) > 0:
+        return get_custom_response(
+            status_code=status.HTTP_200_OK,
+            content=json.dumps(errors),
+        )
+    return get_silent_success()
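Note: the new record route reads a plain dict body rather than a Pydantic model; a sketch of the expected request shape (IDs hypothetical):

    body = {
        "record_ids": ["<record-uuid-1>", "<record-uuid-2>"],
        "group_ids": ["<group-uuid>"],  # optional
        "user_ids": ["<user-uuid>"],    # optional
    }
    # POST /{project_id}/access-management with this JSON body extends the hidden
    # access attributes on the listed records (set union, so no duplicates).
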
diff --git a/fast_api/routes/record_internal.py b/fast_api/routes/record_internal.py
index 703a2810..e0847e6c 100644
--- a/fast_api/routes/record_internal.py
+++ b/fast_api/routes/record_internal.py
@@ -1,10 +1,13 @@
 from typing import Optional
-from fastapi import APIRouter
-from controller.record import manager
+from fastapi import APIRouter, Body, status
+from controller.record import manager as record_manager
+from controller.project import manager as project_manager
 from fast_api.routes.client_response import (
-    get_silent_success,
+    get_custom_response,
+    get_silent_success,
 )
 from fast_api.models import RecordDeletion
+import json

 router = APIRouter()

@@ -15,5 +18,28 @@ def delete_by_record_ids(
     body: RecordDeletion,
     as_thread: Optional[bool] = False,
 ):
-    manager.delete_records(project_id, body.record_ids, as_thread)
+    record_manager.delete_records(project_id, body.record_ids, as_thread)
     return get_silent_success()
+
+
+@router.post("/{project_id}/access-management/sharepoint")
+def sync_access_groups_and_users_sharepoint(
+    project_id: str,
+    body: dict = Body(...),
+):
+    record_ids = body.get("record_ids")
+    integration_id = body.get("integration_id")
+    permissions_user = body.get("permissions_user", {})
+
+    if not project_manager.is_access_management_activated(project_id):
+        project_manager.activate_access_management(project_id)
+
+    errors = record_manager.sync_access_groups_and_users_sharepoint(
+        project_id, integration_id, permissions_user, record_ids
+    )
+    if errors and len(errors) > 0:
+        return get_custom_response(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            content=json.dumps(errors),
+        )
+    return get_silent_success()
diff --git a/route_prefix.py b/route_prefix.py
index 96918c8c..890fab2d 100644
--- a/route_prefix.py
+++ b/route_prefix.py
@@ -2,6 +2,7 @@
 PREFIX_ORGANIZATION = PREFIX + "/organization"
 PREFIX_PROJECT = PREFIX + "/project"
 PREFIX_PROJECT_SETTING = PREFIX + "/project-setting"
+PREFIX_PROJECT_INTERNAL = PREFIX + "/project"
 PREFIX_MISC = PREFIX + "/misc"
 PREFIX_COMMENT = PREFIX + "/comment"
 PREFIX_ATTRIBUTE = PREFIX + "/attribute"
diff --git a/submodules/model b/submodules/model
index 4cdfbd24..19c0a4d2 160000
--- a/submodules/model
+++ b/submodules/model
@@ -1 +1 @@
-Subproject commit 4cdfbd240114f22ba493d9a552b812499e0c5298
+Subproject commit 19c0a4d25233fa0a7d5c4ee5377954c0594d2750