diff --git a/alembic/manual_migrations/migrate_non_api_like_constants.py b/alembic/manual_migrations/migrate_non_api_like_constants.py new file mode 100644 index 00000000..e59d5f3c --- /dev/null +++ b/alembic/manual_migrations/migrate_non_api_like_constants.py @@ -0,0 +1,40 @@ +import sqlalchemy as sa +from sqlalchemy.orm import Session, configure_mappers + +from mavedb.models import * +from mavedb.models.enums.target_category import TargetCategory +from mavedb.models.target_gene import TargetGene + +from mavedb.db.session import SessionLocal + +configure_mappers() + +def api_like_target_gene_category(category: str): + if category == "Protein coding": + return TargetCategory.protein_coding + elif category == "Other noncoding": + return TargetCategory.other_noncoding + elif category == "Regulatory": + return TargetCategory.regulatory + else: + raise ValueError() + + +def do_migration(db: Session): + target_genes = db.scalars(sa.select(TargetGene)).all() + + for target in target_genes: + target.category = api_like_target_gene_category(target.category) + db.add(target) + + db.commit() + + +if __name__ == "__main__": + db = SessionLocal() + db.current_user = None # type: ignore + + do_migration(db) + + db.commit() + db.close() diff --git a/alembic/manual_migrations/refresh_published_tmp_urns.py b/alembic/manual_migrations/refresh_published_tmp_urns.py new file mode 100644 index 00000000..9fe4a4b4 --- /dev/null +++ b/alembic/manual_migrations/refresh_published_tmp_urns.py @@ -0,0 +1,33 @@ +import sqlalchemy as sa +from sqlalchemy.orm import Session, configure_mappers + +from mavedb.models import * + +from mavedb.lib.score_sets import refresh_variant_urns + +from mavedb.models.score_set import ScoreSet +from mavedb.models.variant import Variant + +from mavedb.db.session import SessionLocal + +configure_mappers() + + +def do_migration(db: Session): + published_score_sets_with_associated_tmp_variants: sa.ScalarResult[str] + published_score_sets_with_associated_tmp_variants 
= db.execute( + sa.select(sa.distinct(ScoreSet.urn)).join(Variant).where(ScoreSet.published_date.is_not(None), Variant.urn.like("%tmp:%")) + ).scalars() + + for score_set_urn in published_score_sets_with_associated_tmp_variants: + refresh_variant_urns(db, db.execute(sa.select(ScoreSet).where(ScoreSet.urn == score_set_urn)).scalar_one()) + + +if __name__ == "__main__": + db = SessionLocal() + db.current_user = None # type: ignore + + do_migration(db) + + db.commit() + db.close() diff --git a/alembic/versions/03c7124c33e1_target_category_enum.py b/alembic/versions/03c7124c33e1_target_category_enum.py new file mode 100644 index 00000000..ce9720ac --- /dev/null +++ b/alembic/versions/03c7124c33e1_target_category_enum.py @@ -0,0 +1,54 @@ +"""Target category enum + +Revision ID: 03c7124c33e1 +Revises: 2b6f40ea2fb6 +Create Date: 2024-11-01 11:27:03.609116 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "03c7124c33e1" +down_revision = "2b6f40ea2fb6" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "target_genes", + "category", + type_=sa.Enum( + "protein_coding", + "other_noncoding", + "regulatory", + name="targetcategory", + native_enum=False, + create_constraint=True, + length=32, + ), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column( + "target_genes", + "category", + type_=sa.String(), + existing_type=sa.Enum( + "protein_coding", + "other_noncoding", + "regulatory", + name="targetcategory", + native_enum=False, + create_constraint=True, + length=32, + ), + ) + # ### end Alembic commands ### diff --git a/alembic/versions/2b6f40ea2fb6_add_score_range_column.py b/alembic/versions/2b6f40ea2fb6_add_score_range_column.py index 099a7634..0f69eede 100644 --- a/alembic/versions/2b6f40ea2fb6_add_score_range_column.py +++ b/alembic/versions/2b6f40ea2fb6_add_score_range_column.py @@ -1,7 +1,7 @@ """Add score range column Revision ID: 2b6f40ea2fb6 -Revises: 1d4933b4b6f7 +Revises: 1cee01c42909 Create Date: 2024-09-09 12:25:33.180077 """ diff --git a/alembic/versions/68a0ec57694e_add_active_column_to_licenses.py b/alembic/versions/68a0ec57694e_add_active_column_to_licenses.py new file mode 100644 index 00000000..3ced2b3b --- /dev/null +++ b/alembic/versions/68a0ec57694e_add_active_column_to_licenses.py @@ -0,0 +1,29 @@ +"""Add active column to licenses + +Revision ID: 68a0ec57694e +Revises: 03c7124c33e1 +Create Date: 2024-10-22 15:36:41.868909 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "68a0ec57694e" +down_revision = "03c7124c33e1" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("licenses", sa.Column("active", sa.Boolean(), nullable=False, server_default=sa.true())) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("licenses", "active") + # ### end Alembic commands ### diff --git a/poetry.lock b/poetry.lock index 38fcd06a..a06e32e7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -20,13 +20,13 @@ tz = ["python-dateutil"] [[package]] name = "anyio" -version = "4.6.0" +version = "4.6.2.post1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, - {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, ] [package.dependencies] @@ -37,7 +37,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -668,13 +668,13 @@ crt = ["awscrt (==0.21.2)"] [[package]] name = "botocore-stubs" -version = "1.35.39" +version = "1.35.43" description = "Type annotations and code completion for botocore" optional = false python-versions = ">=3.8" files = [ - {file = "botocore_stubs-1.35.39-py3-none-any.whl", hash = 
"sha256:62b0518ea3056d76e00fa2b30a5de38b9c70eaf1058a2ed5f34bc208222f1b70"}, - {file = "botocore_stubs-1.35.39.tar.gz", hash = "sha256:0d628444a15b94fb7284cd3cc34ba6f6bb7a076a319992d2f19111c644de4dba"}, + {file = "botocore_stubs-1.35.43-py3-none-any.whl", hash = "sha256:b4c7fc22125dc05b3280b9b03ae7b826dab072cac4cc8f93566671988c60fcd7"}, + {file = "botocore_stubs-1.35.43.tar.gz", hash = "sha256:f72326ddc0d79baea615d26cbe87d07a6496469bd9d704a9cab8e7a79e6b0b22"}, ] [package.dependencies] @@ -991,73 +991,73 @@ test = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytes [[package]] name = "coverage" -version = "7.6.2" +version = "7.6.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9df1950fb92d49970cce38100d7e7293c84ed3606eaa16ea0b6bc27175bb667"}, - {file = "coverage-7.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:24500f4b0e03aab60ce575c85365beab64b44d4db837021e08339f61d1fbfe52"}, - {file = "coverage-7.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a663b180b6669c400b4630a24cc776f23a992d38ce7ae72ede2a397ce6b0f170"}, - {file = "coverage-7.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfde025e2793a22efe8c21f807d276bd1d6a4bcc5ba6f19dbdfc4e7a12160909"}, - {file = "coverage-7.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:087932079c065d7b8ebadd3a0160656c55954144af6439886c8bcf78bbbcde7f"}, - {file = "coverage-7.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9c6b0c1cafd96213a0327cf680acb39f70e452caf8e9a25aeb05316db9c07f89"}, - {file = "coverage-7.6.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e85830eed5b5263ffa0c62428e43cb844296f3b4461f09e4bdb0d44ec190bc2"}, - {file = "coverage-7.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", 
hash = "sha256:62ab4231c01e156ece1b3a187c87173f31cbeee83a5e1f6dff17f288dca93345"}, - {file = "coverage-7.6.2-cp310-cp310-win32.whl", hash = "sha256:7b80fbb0da3aebde102a37ef0138aeedff45997e22f8962e5f16ae1742852676"}, - {file = "coverage-7.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:d20c3d1f31f14d6962a4e2f549c21d31e670b90f777ef4171be540fb7fb70f02"}, - {file = "coverage-7.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb21bac7783c1bf6f4bbe68b1e0ff0d20e7e7732cfb7995bc8d96e23aa90fc7b"}, - {file = "coverage-7.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b2e437fbd8fae5bc7716b9c7ff97aecc95f0b4d56e4ca08b3c8d8adcaadb84"}, - {file = "coverage-7.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:536f77f2bf5797983652d1d55f1a7272a29afcc89e3ae51caa99b2db4e89d658"}, - {file = "coverage-7.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f361296ca7054f0936b02525646b2731b32c8074ba6defab524b79b2b7eeac72"}, - {file = "coverage-7.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7926d8d034e06b479797c199747dd774d5e86179f2ce44294423327a88d66ca7"}, - {file = "coverage-7.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0bbae11c138585c89fb4e991faefb174a80112e1a7557d507aaa07675c62e66b"}, - {file = "coverage-7.6.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fcad7d5d2bbfeae1026b395036a8aa5abf67e8038ae7e6a25c7d0f88b10a8e6a"}, - {file = "coverage-7.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f01e53575f27097d75d42de33b1b289c74b16891ce576d767ad8c48d17aeb5e0"}, - {file = "coverage-7.6.2-cp311-cp311-win32.whl", hash = "sha256:7781f4f70c9b0b39e1b129b10c7d43a4e0c91f90c60435e6da8288efc2b73438"}, - {file = "coverage-7.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:9bcd51eeca35a80e76dc5794a9dd7cb04b97f0e8af620d54711793bfc1fbba4b"}, - {file = 
"coverage-7.6.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ebc94fadbd4a3f4215993326a6a00e47d79889391f5659bf310f55fe5d9f581c"}, - {file = "coverage-7.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9681516288e3dcf0aa7c26231178cc0be6cac9705cac06709f2353c5b406cfea"}, - {file = "coverage-7.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d9c5d13927d77af4fbe453953810db766f75401e764727e73a6ee4f82527b3e"}, - {file = "coverage-7.6.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92f9ca04b3e719d69b02dc4a69debb795af84cb7afd09c5eb5d54b4a1ae2191"}, - {file = "coverage-7.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ff2ef83d6d0b527b5c9dad73819b24a2f76fdddcfd6c4e7a4d7e73ecb0656b4"}, - {file = "coverage-7.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47ccb6e99a3031ffbbd6e7cc041e70770b4fe405370c66a54dbf26a500ded80b"}, - {file = "coverage-7.6.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a867d26f06bcd047ef716175b2696b315cb7571ccb951006d61ca80bbc356e9e"}, - {file = "coverage-7.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cdfcf2e914e2ba653101157458afd0ad92a16731eeba9a611b5cbb3e7124e74b"}, - {file = "coverage-7.6.2-cp312-cp312-win32.whl", hash = "sha256:f9035695dadfb397bee9eeaf1dc7fbeda483bf7664a7397a629846800ce6e276"}, - {file = "coverage-7.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:5ed69befa9a9fc796fe015a7040c9398722d6b97df73a6b608e9e275fa0932b0"}, - {file = "coverage-7.6.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eea60c79d36a8f39475b1af887663bc3ae4f31289cd216f514ce18d5938df40"}, - {file = "coverage-7.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa68a6cdbe1bc6793a9dbfc38302c11599bbe1837392ae9b1d238b9ef3dafcf1"}, - {file = "coverage-7.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3ec528ae69f0a139690fad6deac8a7d33629fa61ccce693fdd07ddf7e9931fba"}, - {file = "coverage-7.6.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed5ac02126f74d190fa2cc14a9eb2a5d9837d5863920fa472b02eb1595cdc925"}, - {file = "coverage-7.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21c0ea0d4db8a36b275cb6fb2437a3715697a4ba3cb7b918d3525cc75f726304"}, - {file = "coverage-7.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:35a51598f29b2a19e26d0908bd196f771a9b1c5d9a07bf20be0adf28f1ad4f77"}, - {file = "coverage-7.6.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c9192925acc33e146864b8cf037e2ed32a91fdf7644ae875f5d46cd2ef086a5f"}, - {file = "coverage-7.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf4eeecc9e10f5403ec06138978235af79c9a79af494eb6b1d60a50b49ed2869"}, - {file = "coverage-7.6.2-cp313-cp313-win32.whl", hash = "sha256:e4ee15b267d2dad3e8759ca441ad450c334f3733304c55210c2a44516e8d5530"}, - {file = "coverage-7.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:c71965d1ced48bf97aab79fad56df82c566b4c498ffc09c2094605727c4b7e36"}, - {file = "coverage-7.6.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7571e8bbecc6ac066256f9de40365ff833553e2e0c0c004f4482facb131820ef"}, - {file = "coverage-7.6.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:078a87519057dacb5d77e333f740708ec2a8f768655f1db07f8dfd28d7a005f0"}, - {file = "coverage-7.6.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e5e92e3e84a8718d2de36cd8387459cba9a4508337b8c5f450ce42b87a9e760"}, - {file = "coverage-7.6.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebabdf1c76593a09ee18c1a06cd3022919861365219ea3aca0247ededf6facd6"}, - {file = "coverage-7.6.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:12179eb0575b8900912711688e45474f04ab3934aaa7b624dea7b3c511ecc90f"}, - {file = "coverage-7.6.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:39d3b964abfe1519b9d313ab28abf1d02faea26cd14b27f5283849bf59479ff5"}, - {file = "coverage-7.6.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:84c4315577f7cd511d6250ffd0f695c825efe729f4205c0340f7004eda51191f"}, - {file = "coverage-7.6.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ff797320dcbff57caa6b2301c3913784a010e13b1f6cf4ab3f563f3c5e7919db"}, - {file = "coverage-7.6.2-cp313-cp313t-win32.whl", hash = "sha256:2b636a301e53964550e2f3094484fa5a96e699db318d65398cfba438c5c92171"}, - {file = "coverage-7.6.2-cp313-cp313t-win_amd64.whl", hash = "sha256:d03a060ac1a08e10589c27d509bbdb35b65f2d7f3f8d81cf2fa199877c7bc58a"}, - {file = "coverage-7.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c37faddc8acd826cfc5e2392531aba734b229741d3daec7f4c777a8f0d4993e5"}, - {file = "coverage-7.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab31fdd643f162c467cfe6a86e9cb5f1965b632e5e65c072d90854ff486d02cf"}, - {file = "coverage-7.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97df87e1a20deb75ac7d920c812e9326096aa00a9a4b6d07679b4f1f14b06c90"}, - {file = "coverage-7.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:343056c5e0737487a5291f5691f4dfeb25b3e3c8699b4d36b92bb0e586219d14"}, - {file = "coverage-7.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4ef1c56b47b6b9024b939d503ab487231df1f722065a48f4fc61832130b90e"}, - {file = "coverage-7.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fca4a92c8a7a73dee6946471bce6d1443d94155694b893b79e19ca2a540d86e"}, - {file = "coverage-7.6.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69f251804e052fc46d29d0e7348cdc5fcbfc4861dc4a1ebedef7e78d241ad39e"}, - {file = 
"coverage-7.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e8ea055b3ea046c0f66217af65bc193bbbeca1c8661dc5fd42698db5795d2627"}, - {file = "coverage-7.6.2-cp39-cp39-win32.whl", hash = "sha256:6c2ba1e0c24d8fae8f2cf0aeb2fc0a2a7f69b6d20bd8d3749fd6b36ecef5edf0"}, - {file = "coverage-7.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:2186369a654a15628e9c1c9921409a6b3eda833e4b91f3ca2a7d9f77abb4987c"}, - {file = "coverage-7.6.2-pp39.pp310-none-any.whl", hash = "sha256:667952739daafe9616db19fbedbdb87917eee253ac4f31d70c7587f7ab531b4e"}, - {file = "coverage-7.6.2.tar.gz", hash = "sha256:a5f81e68aa62bc0cfca04f7b19eaa8f9c826b53fc82ab9e2121976dc74f131f3"}, + {file = "coverage-7.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976"}, + {file = "coverage-7.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2"}, + {file = "coverage-7.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d"}, + {file = "coverage-7.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c"}, + {file = "coverage-7.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a"}, + {file = "coverage-7.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e"}, + {file = "coverage-7.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc"}, + {file = "coverage-7.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e"}, + {file = 
"coverage-7.6.3-cp310-cp310-win32.whl", hash = "sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007"}, + {file = "coverage-7.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd"}, + {file = "coverage-7.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b"}, + {file = "coverage-7.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba"}, + {file = "coverage-7.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38"}, + {file = "coverage-7.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549"}, + {file = "coverage-7.6.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2"}, + {file = "coverage-7.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175"}, + {file = "coverage-7.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b"}, + {file = "coverage-7.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f"}, + {file = "coverage-7.6.3-cp311-cp311-win32.whl", hash = "sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97"}, + {file = "coverage-7.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6"}, + {file = "coverage-7.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6"}, + 
{file = "coverage-7.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f"}, + {file = "coverage-7.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234"}, + {file = "coverage-7.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f"}, + {file = "coverage-7.6.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4"}, + {file = "coverage-7.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3"}, + {file = "coverage-7.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83"}, + {file = "coverage-7.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167"}, + {file = "coverage-7.6.3-cp312-cp312-win32.whl", hash = "sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd"}, + {file = "coverage-7.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6"}, + {file = "coverage-7.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6"}, + {file = "coverage-7.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929"}, + {file = "coverage-7.6.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990"}, + {file = 
"coverage-7.6.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4"}, + {file = "coverage-7.6.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39"}, + {file = "coverage-7.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21"}, + {file = "coverage-7.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b"}, + {file = "coverage-7.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4"}, + {file = "coverage-7.6.3-cp313-cp313-win32.whl", hash = "sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f"}, + {file = "coverage-7.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce"}, + {file = "coverage-7.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3"}, + {file = "coverage-7.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3"}, + {file = "coverage-7.6.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d"}, + {file = "coverage-7.6.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38"}, + {file = "coverage-7.6.3-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd"}, + {file 
= "coverage-7.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92"}, + {file = "coverage-7.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5"}, + {file = "coverage-7.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91"}, + {file = "coverage-7.6.3-cp313-cp313t-win32.whl", hash = "sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43"}, + {file = "coverage-7.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0"}, + {file = "coverage-7.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2"}, + {file = "coverage-7.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba"}, + {file = "coverage-7.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c"}, + {file = "coverage-7.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40"}, + {file = "coverage-7.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e"}, + {file = "coverage-7.6.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6"}, + {file = "coverage-7.6.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb"}, + {file = "coverage-7.6.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13"}, + {file = "coverage-7.6.3-cp39-cp39-win32.whl", hash = "sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3"}, + {file = "coverage-7.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d"}, + {file = "coverage-7.6.3-pp39.pp310-none-any.whl", hash = "sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181"}, + {file = "coverage-7.6.3.tar.gz", hash = "sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054"}, ] [package.dependencies] @@ -1578,51 +1578,58 @@ trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httptools" -version = "0.6.1" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = true python-versions = ">=3.8.0" files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file 
= "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = 
"httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] +test = ["Cython (>=0.29.24)"] [[package]] name = "httpx" @@ -2487,32 +2494,33 @@ wcwidth = "*" [[package]] name = "psutil" -version = "6.0.0" +version = "6.1.0" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg" @@ -2539,24 +2547,20 @@ test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", [[package]] name = "psycopg2" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, - {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = 
"sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, - {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, - {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, - {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, - {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, - {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, - {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, - {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, + {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash 
= "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, ] [[package]] @@ -2916,24 +2920,64 @@ files = [ [[package]] name = "pyyaml" -version = "5.1.2" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "PyYAML-5.1.2-cp27-cp27m-win32.whl", hash = "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8"}, - {file = "PyYAML-5.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"}, - {file = "PyYAML-5.1.2-cp34-cp34m-win32.whl", hash = "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9"}, - {file = "PyYAML-5.1.2-cp34-cp34m-win_amd64.whl", hash = "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696"}, - {file = "PyYAML-5.1.2-cp35-cp35m-win32.whl", hash = "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41"}, - {file = "PyYAML-5.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73"}, - {file = "PyYAML-5.1.2-cp36-cp36m-win32.whl", hash = "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299"}, - {file = "PyYAML-5.1.2-cp36-cp36m-win_amd64.whl", hash = 
"sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b"}, - {file = "PyYAML-5.1.2-cp37-cp37m-win32.whl", hash = "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae"}, - {file = "PyYAML-5.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34"}, - {file = "PyYAML-5.1.2-cp38-cp38m-win32.whl", hash = "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9"}, - {file = "PyYAML-5.1.2-cp38-cp38m-win_amd64.whl", hash = "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681"}, - {file = "PyYAML-5.1.2.tar.gz", hash = "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = 
"sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -3182,13 +3226,13 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "setuptools" -version = "75.1.0" +version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, ] [package.extras] @@ -3380,60 +3424,68 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.35" +version = "2.0.36" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, - {file = 
"SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, - {file = 
"SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, - {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, - {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = 
"SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = 
"SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = 
"sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] @@ -3447,7 +3499,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -3757,13 +3809,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.31.1" +version = "0.32.0" description = "The lightning-fast ASGI server." optional = true python-versions = ">=3.8" files = [ - {file = "uvicorn-0.31.1-py3-none-any.whl", hash = "sha256:adc42d9cac80cf3e51af97c1851648066841e7cfb6993a4ca8de29ac1548ed41"}, - {file = "uvicorn-0.31.1.tar.gz", hash = "sha256:f5167919867b161b7bcaf32646c6a94cdbd4c3aa2eb5c17d36bb9aa5cfd8c493"}, + {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, + {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, ] [package.dependencies] @@ -3783,47 +3835,54 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.20.0" +version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = true python-versions = ">=3.8.0" files = [ - {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, - {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, - {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, - {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, - {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, - {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, - {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, - {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, - {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, - {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, - {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, - {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, - {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, - {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, - {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, - {file = 
"uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, - {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, - {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, - {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, - {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, - {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, - {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, - {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, - {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, - {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, - {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, - {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, - {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, - 
{file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, - {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, - {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = 
"uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, 
+ {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = 
"uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, ] [package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] [[package]] name = "virtualenv" @@ -4120,4 +4179,4 @@ server = ["alembic", "arq", "authlib", "biocommons", "boto3", "cdot", "cryptogra [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f5a4cedf018200abbbb7eebf9d2a51110454c5dac959d3ab0601bc185e2a351c" +content-hash = "ae56dc1a1a2c4b05f6374fd532267500028eb43f1c78ae9ca908f3b712868bec" diff --git a/pyproject.toml b/pyproject.toml index 98e8a828..b95efe2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "mavedb" -version = "2024.4.1" +version = "2024.4.2" description = "API for MaveDB, the database of Multiplexed Assays of Variant Effect." license = "AGPL-3.0-only" readme = "README.md" @@ -26,7 +26,7 @@ python = "^3.9" fqfa = "~1.3.0" pyhumps = "~3.8.0" -pyyaml = "~5.1" +pyyaml = "~6.0.1" IDUtils = "~1.2.0" mavehgvs = "~0.6.0" eutils = "~0.6.0" @@ -99,6 +99,7 @@ mypy_path = "mypy_stubs" addopts = "-v -rP --import-mode=importlib --disable-socket --allow-hosts localhost,::1,127.0.0.1" asyncio_mode = 'strict' testpaths = "tests/" +pythonpath = "." norecursedirs = "tests/helpers/" # Uncomment the following lines to include application log output in Pytest logs. 
# log_cli = true diff --git a/src/mavedb/__init__.py b/src/mavedb/__init__.py index 5ad78a08..b54c8e43 100644 --- a/src/mavedb/__init__.py +++ b/src/mavedb/__init__.py @@ -6,6 +6,6 @@ logger = module_logging.getLogger(__name__) __project__ = "mavedb-api" -__version__ = "2024.4.1" +__version__ = "2024.4.2" logger.info(f"MaveDB {__version__}") diff --git a/src/mavedb/lib/experiments.py b/src/mavedb/lib/experiments.py index fcdb3814..d771f26b 100644 --- a/src/mavedb/lib/experiments.py +++ b/src/mavedb/lib/experiments.py @@ -99,7 +99,7 @@ def search_experiments( ) ) - items: list[Experiment] = query.order_by(Experiment.title).all() + items: list[Experiment] = query.order_by(Experiment.urn, Experiment.title).all() if not items: items = [] diff --git a/src/mavedb/lib/score_sets.py b/src/mavedb/lib/score_sets.py index 8384ecbf..775e067d 100644 --- a/src/mavedb/lib/score_sets.py +++ b/src/mavedb/lib/score_sets.py @@ -617,6 +617,21 @@ def create_variants(db, score_set: ScoreSet, variants_data: list[VariantData], b return len(score_set.variants) +def refresh_variant_urns(db: Session, score_set: ScoreSet): + variants = db.execute(select(Variant).where(Variant.score_set_id == score_set.id)).scalars() + + for variant in variants: + if not variant.urn: + raise ValueError("All variants should have an associated URN.") + + variant_number = variant.urn.split("#")[1] + refreshed_urn = f"{score_set.urn}#{variant_number}" + variant.urn = refreshed_urn + db.add(variant) + + db.commit() + + def bulk_create_urns(n, score_set, reset_counter=False) -> list[str]: start_value = 0 if reset_counter else score_set.num_variants parent_urn = score_set.urn diff --git a/src/mavedb/lib/script_environment.py b/src/mavedb/lib/script_environment.py deleted file mode 100644 index d81e909d..00000000 --- a/src/mavedb/lib/script_environment.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Environment setup for scripts. 
-""" - -from sqlalchemy.orm import Session, configure_mappers - -from mavedb import deps -from mavedb.models import * # noqa: F403 - - -def init_script_environment() -> Session: - """ - Set up the environment for a script that may be run from the command line and does not necessarily depend on the - FastAPI framework. - - Features: - - Configures logging for the script. - - Loads the SQLAlchemy data model. - - Returns an SQLAlchemy database session. - """ - # Scan all our model classes and create backref attributes. Otherwise, these attributes only get added to classes once - # an instance of the related class has been created. - configure_mappers() - - return next(deps.get_db()) diff --git a/src/mavedb/lib/target_genes.py b/src/mavedb/lib/target_genes.py new file mode 100644 index 00000000..614864f6 --- /dev/null +++ b/src/mavedb/lib/target_genes.py @@ -0,0 +1,56 @@ +import logging +from typing import Optional + +from sqlalchemy import func, or_ +from sqlalchemy.orm import Session + +from mavedb.lib.logging.context import logging_context, save_to_logging_context +from mavedb.models.contributor import Contributor +from mavedb.models.score_set import ScoreSet +from mavedb.models.target_gene import TargetGene +from mavedb.models.user import User +from mavedb.view_models.search import TextSearch + +logger = logging.getLogger(__name__) + + +def search_target_genes( + db: Session, + owner_or_contributor: Optional[User], + search: TextSearch, + limit: Optional[int], +) -> list[TargetGene]: + save_to_logging_context({"target_gene_search_criteria": search.dict()}) + + query = db.query(TargetGene) + + if search.text and len(search.text.strip()) > 0: + lower_search_text = search.text.strip().lower() + query = query.filter(func.lower(TargetGene.name).contains(lower_search_text)) + if owner_or_contributor is not None: + query = query.filter( + TargetGene.score_set.has( + or_( + ScoreSet.created_by_id == owner_or_contributor.id, + ScoreSet.contributors.any( + 
Contributor.orcid_id == owner_or_contributor.username + ), + ) + ) + ) + + query = query.order_by(TargetGene.name) + if limit is not None: + query = query.limit(limit) + + target_genes = query.all() + if not target_genes: + target_genes = [] + + save_to_logging_context({"matching_resources": len(target_genes)}) + logger.debug( + msg=f"Target gene search yielded {len(target_genes)} matching resources.", + extra=logging_context(), + ) + + return target_genes diff --git a/src/mavedb/lib/validation/constants/target.py b/src/mavedb/lib/validation/constants/target.py index 6bf8392e..f64b4bd4 100644 --- a/src/mavedb/lib/validation/constants/target.py +++ b/src/mavedb/lib/validation/constants/target.py @@ -1,2 +1 @@ -valid_categories = ["Protein coding", "Regulatory", "Other noncoding"] valid_sequence_types = ["infer", "dna", "protein"] diff --git a/src/mavedb/lib/validation/target.py b/src/mavedb/lib/validation/target.py index f22121ac..3d65c7b9 100644 --- a/src/mavedb/lib/validation/target.py +++ b/src/mavedb/lib/validation/target.py @@ -1,31 +1,10 @@ from fqfa import infer_sequence_type from fqfa.validator import amino_acids_validator, dna_bases_validator -from mavedb.lib.validation.constants.target import valid_categories, valid_sequence_types +from mavedb.lib.validation.constants.target import valid_sequence_types from mavedb.lib.validation.exceptions import ValidationError -def validate_target_category(category: str): - """ - If the target category provided does not fall within a pre-defined list of valid categories. - - Parameters - __________ - category: str - The target category to be validated. - - Raises - ______ - ValidationError - If the target category provided is not valid. - """ - if category not in valid_categories: - raise ValidationError( - "{} is not a valid target category. 
Valid categories are " - "Protein coding, Regulatory, and Other noncoding".format(category) - ) - - def validate_sequence_category(sequence_type: str): """ If the sequence type provided does not fall within a pre-defined list of valid sequence types. diff --git a/src/mavedb/models/enums/target_category.py b/src/mavedb/models/enums/target_category.py new file mode 100644 index 00000000..540ca847 --- /dev/null +++ b/src/mavedb/models/enums/target_category.py @@ -0,0 +1,7 @@ +from enum import Enum + + +class TargetCategory(str, Enum): + protein_coding = "protein_coding" + regulatory = "regulatory" + other_noncoding = "other_noncoding" diff --git a/src/mavedb/models/license.py b/src/mavedb/models/license.py index a7096a09..65efa67c 100644 --- a/src/mavedb/models/license.py +++ b/src/mavedb/models/license.py @@ -1,6 +1,6 @@ from datetime import date -from sqlalchemy import Column, Date, Integer, String +from sqlalchemy import Boolean, Column, Date, Integer, String from mavedb.db.base import Base @@ -16,3 +16,4 @@ class License(Base): version = Column(String, nullable=True, unique=False) creation_date = Column(Date, nullable=False, default=date.today) modification_date = Column(Date, nullable=False, default=date.today, onupdate=date.today) + active = Column(Boolean, nullable=False) diff --git a/src/mavedb/models/mapped_variant.py b/src/mavedb/models/mapped_variant.py index 5a418b22..57cefd03 100644 --- a/src/mavedb/models/mapped_variant.py +++ b/src/mavedb/models/mapped_variant.py @@ -14,8 +14,8 @@ class MappedVariant(Base): id = Column(Integer, primary_key=True) - pre_mapped = Column(JSONB, nullable=True) - post_mapped = Column(JSONB, nullable=True) + pre_mapped = Column(JSONB(none_as_null=True), nullable=True) + post_mapped = Column(JSONB(none_as_null=True), nullable=True) vrs_version = Column(String, nullable=True) error_message = Column(String, nullable=True) modification_date = Column(Date, nullable=False, default=date.today, onupdate=date.today) diff --git 
a/src/mavedb/models/target_gene.py b/src/mavedb/models/target_gene.py index 36b88f85..19f3ea96 100644 --- a/src/mavedb/models/target_gene.py +++ b/src/mavedb/models/target_gene.py @@ -1,11 +1,12 @@ from datetime import date from typing import TYPE_CHECKING -from sqlalchemy import Column, Date, ForeignKey, Integer, String +from sqlalchemy import Column, Date, Enum, ForeignKey, Integer, String from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, backref, relationship from mavedb.db.base import Base +from mavedb.models.enums.target_category import TargetCategory from mavedb.models.score_set import ScoreSet from mavedb.models.target_accession import TargetAccession from mavedb.models.target_sequence import TargetSequence @@ -24,7 +25,10 @@ class TargetGene(Base): id = Column(Integer, primary_key=True) name = Column(String, nullable=False) - category = Column(String, nullable=False) + category = Column( + Enum(TargetCategory, create_constraint=True, length=32, native_enum=False, validate_strings=True), + nullable=False, + ) score_set_id = Column("scoreset_id", Integer, ForeignKey("scoresets.id"), index=True, nullable=False) score_set: Mapped[ScoreSet] = relationship(back_populates="target_genes", single_parent=True, uselist=True) diff --git a/src/mavedb/routers/licenses.py b/src/mavedb/routers/licenses.py index 0bded44a..78b29aa1 100644 --- a/src/mavedb/routers/licenses.py +++ b/src/mavedb/routers/licenses.py @@ -23,6 +23,19 @@ def list_licenses( return items +@router.get("/active", status_code=200, response_model=List[license.ShortLicense], responses={404: {}}) +def list_active_licenses( + *, + db: Session = Depends(deps.get_db), +) -> Any: + """ + List active licenses. 
+ """ + + items = db.query(License).where(License.active.is_(True)).order_by(License.short_name).all() + return items + + @router.get("/{item_id}", status_code=200, response_model=license.License, responses={404: {}}) def fetch_license( *, diff --git a/src/mavedb/routers/score_sets.py b/src/mavedb/routers/score_sets.py index 1746e703..353ee1ab 100644 --- a/src/mavedb/routers/score_sets.py +++ b/src/mavedb/routers/score_sets.py @@ -39,6 +39,7 @@ ) from mavedb.lib.score_sets import ( search_score_sets as _search_score_sets, + refresh_variant_urns, ) from mavedb.lib.taxonomies import find_or_create_taxonomy from mavedb.lib.urns import ( @@ -333,6 +334,10 @@ async def create_score_set( msg="Failed to create score set; The requested experiment does not exist.", extra=logging_context() ) raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown experiment") + # Not allow add score set in meta-analysis experiments. + if any(s.meta_analyzes_score_sets for s in experiment.score_sets): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, + detail="Score sets may not be added to a meta-analysis experiment.") save_to_logging_context({"experiment": experiment.urn}) assert_permission(user_data, experiment, Action.ADD_SCORE_SET) @@ -343,6 +348,11 @@ async def create_score_set( if not license_: logger.info(msg="Failed to create score set; The requested license does not exist.", extra=logging_context()) raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown license") + elif not license_.active: + logger.info( + msg="Failed to create score set; The requested license is no longer active.", extra=logging_context() + ) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid license") save_to_logging_context({"requested_superseded_score_set": item_create.superseded_score_set_urn}) if item_create.superseded_score_set_urn is not None: @@ -385,7 +395,7 @@ async def create_score_set( ) if len(meta_analyzes_score_sets) > 
0: - # If any existing score set is a meta-analysis for score sets in the same collection of exepriment sets, use its + # If any existing score set is a meta-analysis for score sets in the same collection of experiment sets, use its # experiment as the parent of our new meta-analysis. Otherwise, create a new experiment. meta_analyzes_experiment_sets = list( set( @@ -646,7 +656,9 @@ async def upload_score_set_variant_data( return item -@router.put("/score-sets/{urn}", response_model=score_set.ScoreSet, responses={422: {}}) +@router.put( + "/score-sets/{urn}", response_model=score_set.ScoreSet, responses={422: {}}, response_model_exclude_none=True +) async def update_score_set( *, urn: str, @@ -668,68 +680,73 @@ async def update_score_set( assert_permission(user_data, item, Action.UPDATE) - # Editing unpublished score set - if item.private is True: - license_ = None - - if item_update.license_id is not None: - save_to_logging_context({"license": item_update.license_id}) - license_ = db.query(License).filter(License.id == item_update.license_id).one_or_none() + for var, value in vars(item_update).items(): + if var not in [ + "contributors", + "score_ranges", + "doi_identifiers", + "experiment_urn", + "license_id", + "secondary_publication_identifiers", + "primary_publication_identifiers", + "target_genes", + ]: + setattr(item, var, value) if value else None + + if item_update.license_id is not None: + save_to_logging_context({"license": item_update.license_id}) + license_ = db.query(License).filter(License.id == item_update.license_id).one_or_none() + + if not license_: + logger.info( + msg="Failed to update score set; The requested license does not exist.", extra=logging_context() + ) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown license") - if not license_: - logger.info( - msg="Failed to update score set; The requested license does not exist.", extra=logging_context() - ) - raise 
HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown license") + # Allow in-active licenses to be retained on update if they already exist on the item. + elif not license_.active and item.license_id != item_update.license_id: + logger.info( + msg="Failed to update score set license; The requested license is no longer active.", + extra=logging_context(), + ) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid license") - item.license = license_ + item.license = license_ - for var, value in vars(item_update).items(): - if var not in [ - "contributors", - "score_ranges", - "doi_identifiers", - "experiment_urn", - "license_id", - "secondary_publication_identifiers", - "primary_publication_identifiers", - "target_genes", - ]: - setattr(item, var, value) if value else None - - try: - item.contributors = [ - await find_or_create_contributor(db, contributor.orcid_id) - for contributor in item_update.contributors or [] - ] - except NonexistentOrcidUserError as e: - logger.error(msg="Could not find ORCID user with the provided user ID.", extra=logging_context()) - raise pydantic.ValidationError( - [pydantic.error_wrappers.ErrorWrapper(ValidationError(str(e)), loc="contributors")], - model=score_set.ScoreSetUpdate, - ) + item.doi_identifiers = [ + await find_or_create_doi_identifier(db, identifier.identifier) + for identifier in item_update.doi_identifiers or [] + ] + primary_publication_identifiers = [ + await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name) + for identifier in item_update.primary_publication_identifiers or [] + ] + publication_identifiers = [ + await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name) + for identifier in item_update.secondary_publication_identifiers or [] + ] + primary_publication_identifiers - item.doi_identifiers = [ - await find_or_create_doi_identifier(db, identifier.identifier) - for identifier in
item_update.doi_identifiers or [] - ] - primary_publication_identifiers = [ - await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name) - for identifier in item_update.primary_publication_identifiers or [] - ] - publication_identifiers = [ - await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name) - for identifier in item_update.secondary_publication_identifiers or [] - ] + primary_publication_identifiers + # create a temporary `primary` attribute on each of our publications that indicates + # to our association proxy whether it is a primary publication or not + primary_identifiers = [p.identifier for p in primary_publication_identifiers] + for publication in publication_identifiers: + setattr(publication, "primary", publication.identifier in primary_identifiers) - # create a temporary `primary` attribute on each of our publications that indicates - # to our association proxy whether it is a primary publication or not - primary_identifiers = [pub.identifier for pub in primary_publication_identifiers] - for publication in publication_identifiers: - setattr(publication, "primary", publication.identifier in primary_identifiers) + item.publication_identifiers = publication_identifiers - item.publication_identifiers = publication_identifiers + try: + item.contributors = [ + await find_or_create_contributor(db, contributor.orcid_id) for contributor in item_update.contributors or [] + ] + except NonexistentOrcidUserError as e: + logger.error(msg="Could not find ORCID user with the provided user ID.", extra=logging_context()) + raise pydantic.ValidationError( + [pydantic.error_wrappers.ErrorWrapper(ValidationError(str(e)), loc="contributors")], + model=score_set.ScoreSetUpdate, + ) + # Score set has not been published and attributes affecting scores may still be edited. 
+ if item.private: if item_update.score_ranges: item.score_ranges = item_update.score_ranges.dict() else: @@ -884,35 +901,8 @@ async def update_score_set( if job is not None: save_to_logging_context({"worker_job_id": job.job_id}) logger.info(msg="Enqueud variant creation job.", extra=logging_context()) - - for var, value in vars(item_update).items(): - if var not in [ - "score_ranges", - "contributors", - "doi_identifiers", - "experiment_urn", - "primary_publication_identifiers", - "secondary_publication_identifiers", - "target_genes", - ]: - setattr(item, var, value) if value else None - - # Editing published score set else: - for var, value in vars(item_update).items(): - if var in ["title", "method_text", "abstract_text", "short_description"]: - setattr(item, var, value) if value else None - try: - item.contributors = [ - await find_or_create_contributor(db, contributor.orcid_id) - for contributor in item_update.contributors or [] - ] - except NonexistentOrcidUserError as e: - logger.error(msg="Could not find ORCID user with the provided user ID.", extra=logging_context()) - raise pydantic.ValidationError( - [pydantic.error_wrappers.ErrorWrapper(ValidationError(str(e)), loc="contributors")], - model=score_set.ScoreSetUpdate, - ) + logger.debug(msg="Skipped score range and target gene update. 
Score set is published.", extra=logging_context()) db.add(item) db.commit() @@ -1034,6 +1024,7 @@ def publish_score_set( item.urn = generate_score_set_urn(db, item.experiment) item.private = False item.published_date = published_date + refresh_variant_urns(db, item) save_to_logging_context({"score_set": item.urn}) diff --git a/src/mavedb/routers/target_genes.py b/src/mavedb/routers/target_genes.py index abbd23ea..8c94b57c 100644 --- a/src/mavedb/routers/target_genes.py +++ b/src/mavedb/routers/target_genes.py @@ -1,18 +1,35 @@ from typing import Any, List from fastapi import APIRouter, Depends, HTTPException -from sqlalchemy import func from sqlalchemy.orm import Session from mavedb import deps +from mavedb.lib.authentication import UserData +from mavedb.lib.authorization import require_current_user +from mavedb.lib.target_genes import ( + search_target_genes as _search_target_genes, +) from mavedb.models.target_gene import TargetGene from mavedb.view_models import target_gene from mavedb.view_models.search import TextSearch -router = APIRouter(prefix="/api/v1/target-genes", tags=["target-genes"], responses={404: {"description": "Not found"}}) +router = APIRouter(prefix="/api/v1", tags=["target-genes"], responses={404: {"description": "Not found"}}) -@router.get("/", status_code=200, response_model=List[target_gene.TargetGene], responses={404: {}}) +@router.post("/me/target-genes/search", status_code=200, response_model=List[target_gene.TargetGene]) +def search_my_target_genes( + search: TextSearch, + db: Session = Depends(deps.get_db), + user_data: UserData = Depends(require_current_user) +) -> Any: + """ + Search my target genes. 
+ """ + + return _search_target_genes(db, user_data.user, search, 50) + + +@router.get("/target-genes", status_code=200, response_model=List[target_gene.TargetGene], responses={404: {}}) def list_target_genes( *, db: Session = Depends(deps.get_db), @@ -24,7 +41,7 @@ def list_target_genes( return items -@router.get("/names", status_code=200, response_model=List[str], responses={404: {}}) +@router.get("/target-genes/names", status_code=200, response_model=List[str], responses={404: {}}) def list_target_gene_names( *, db: Session = Depends(deps.get_db), @@ -38,7 +55,7 @@ def list_target_gene_names( return sorted(list(set(names))) -@router.get("/categories", status_code=200, response_model=List[str], responses={404: {}}) +@router.get("/target-genes/categories", status_code=200, response_model=List[str], responses={404: {}}) def list_target_gene_categories( *, db: Session = Depends(deps.get_db), @@ -52,7 +69,7 @@ def list_target_gene_categories( return sorted(list(set(categories))) -@router.get("/{item_id}", status_code=200, response_model=target_gene.TargetGene, responses={404: {}}) +@router.get("/target-genes/{item_id}", status_code=200, response_model=target_gene.TargetGene, responses={404: {}}) def fetch_target_gene( *, item_id: int, @@ -67,20 +84,13 @@ def fetch_target_gene( return item -@router.post("/search", status_code=200, response_model=List[target_gene.TargetGene]) -def search_target_genes(search: TextSearch, db: Session = Depends(deps.get_db)) -> Any: +@router.post("/target-genes/search", status_code=200, response_model=List[target_gene.TargetGene]) +def search_target_genes( + search: TextSearch, + db: Session = Depends(deps.get_db) +) -> Any: """ Search target genes. 
""" - query = db.query(TargetGene) - - if search.text and len(search.text.strip()) > 0: - lower_search_text = search.text.strip().lower() - query = query.filter(func.lower(TargetGene.name).contains(lower_search_text)) - else: - raise HTTPException(status_code=500, detail="Search text is required") - items = query.order_by(TargetGene.name).limit(50).all() - if not items: - items = [] - return items + return _search_target_genes(db, None, search, 50) diff --git a/src/mavedb/scripts/environment.py b/src/mavedb/scripts/environment.py new file mode 100644 index 00000000..f773f55f --- /dev/null +++ b/src/mavedb/scripts/environment.py @@ -0,0 +1,160 @@ +""" +Environment setup for scripts. +""" + +import enum +import logging +import click +from functools import wraps + + +from sqlalchemy.orm import configure_mappers + +from mavedb import deps +from mavedb.models import * # noqa: F403 + + +logger = logging.getLogger(__name__) + + +@enum.unique +class DatabaseSessionAction(enum.Enum): + """ + Enum representing the database session transaction action selected for a + command decorated by :py:func:`.with_database_session`. + + You will not need to use this class unless you provide ``pass_action = + True`` to :py:func:`.with_database_session`. + """ + + DRY_RUN = "rollback" + PROMPT = "prompt" + COMMIT = "commit" + + +@click.group() +def script_environment(): + """ + Set up the environment for a script that may be run from the command line and does not necessarily depend on the + FastAPI framework. + + Features: + - Configures logging for the script. + - Loads the SQLAlchemy data model. + """ + + logging.basicConfig() + + # Un-comment this line to log all database queries: + logging.getLogger("__main__").setLevel(logging.INFO) + # logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) + + # Scan all our model classes and create backref attributes. Otherwise, these attributes only get added to classes once + # an instance of the related class has been created. 
+ configure_mappers() + + +def with_database_session(command=None, *, pass_action: bool = False): + """ + Decorator to provide database session and error handling for a *command*. + + The *command* callable must be a :py:class:`click.Command` instance. + + The decorated *command* is called with a ``db`` keyword argument to provide + a :class:`~id3c.db.session.DatabaseSession` object. The call happens + within an exception handler that commits or rollsback the database + transaction, possibly interactively. Three new options are added to the + *command* (``--dry-run``, ``--prompt``, and ``--commit``) to control this + behaviour. + + >>> @click.command + ... @with_database_session + ... def cmd(db: DatabaseSession): + ... pass + + If the optional, keyword-only argument *pass_action* is ``True``, then the + :py:class:`.DatabaseSessionAction` selected by the CLI options above is + passed as an additional ``action`` argument to the decorated *command*. + + >>> @click.command + ... @with_database_session(pass_action = True) + ... def cmd(db: DatabaseSession, action: DatabaseSessionAction): + ... pass + + One example where this is useful is when the *command* accesses + non-database resources and wants to extend dry run mode to them as well. 
+ """ + + def decorator(command): + @click.option( + "--dry-run", + "action", + help="Only go through the motions of changing the database (default)", + flag_value=DatabaseSessionAction("rollback"), + type=DatabaseSessionAction, + default=True, + ) + @click.option( + "--prompt", + "action", + help="Ask if changes to the database should be saved", + flag_value=DatabaseSessionAction("prompt"), + type=DatabaseSessionAction, + ) + @click.option( + "--commit", + "action", + help="Save changes to the database", + flag_value=DatabaseSessionAction("commit"), + type=DatabaseSessionAction, + ) + @wraps(command) + def decorated(*args, action, **kwargs): + db = next(deps.get_db()) + + kwargs["db"] = db + + if pass_action: + kwargs["action"] = action + + processed_without_error = None + + try: + command(*args, **kwargs) + + except Exception as error: + processed_without_error = False + + logger.error(f"Aborting with error: {error}") + raise error from None + + else: + processed_without_error = True + + finally: + if action is DatabaseSessionAction.PROMPT: + ask_to_commit = ( + "Commit all changes?" + if processed_without_error + else "Commit successfully processed records up to this point?" 
+ ) + + commit = click.confirm(ask_to_commit) + else: + commit = action is DatabaseSessionAction.COMMIT + + if commit: + logger.info( + "Committing all changes" + if processed_without_error + else "Committing successfully processed records up to this point" + ) + db.commit() + + else: + logger.info("Rolling back all changes; the database will not be modified") + db.rollback() + + return decorated + + return decorator(command) if command else decorator diff --git a/src/mavedb/scripts/export_public_data.py b/src/mavedb/scripts/export_public_data.py index 705e7981..4a52ee80 100644 --- a/src/mavedb/scripts/export_public_data.py +++ b/src/mavedb/scripts/export_public_data.py @@ -34,17 +34,16 @@ from fastapi.encoders import jsonable_encoder from sqlalchemy import select -from sqlalchemy.orm import lazyload +from sqlalchemy.orm import lazyload, Session from mavedb.lib.score_sets import get_score_set_counts_as_csv, get_score_set_scores_as_csv -from mavedb.lib.script_environment import init_script_environment from mavedb.models.experiment import Experiment from mavedb.models.experiment_set import ExperimentSet from mavedb.models.license import License from mavedb.models.score_set import ScoreSet from mavedb.view_models.experiment_set import ExperimentSetPublicDump -db = init_script_environment() +from mavedb.scripts.environment import script_environment, with_database_session logger = logging.getLogger(__name__) @@ -89,68 +88,73 @@ def flatmap(f: Callable[[S], Iterable[T]], items: Iterable[S]) -> Iterable[T]: return chain.from_iterable(map(f, items)) -logger.info("Fetching data sets") - -experiment_sets_query = db.scalars( - select(ExperimentSet) - .where(ExperimentSet.published_date.is_not(None)) - .options( - lazyload(ExperimentSet.experiments.and_(Experiment.published_date.is_not(None))).options( - lazyload( - Experiment.score_sets.and_( - ScoreSet.published_date.is_not(None), ScoreSet.license.has(License.short_name == "CC0") +@script_environment.command() 
+@with_database_session +def export_public_data(db: Session): + experiment_sets_query = db.scalars( + select(ExperimentSet) + .where(ExperimentSet.published_date.is_not(None)) + .options( + lazyload(ExperimentSet.experiments.and_(Experiment.published_date.is_not(None))).options( + lazyload( + Experiment.score_sets.and_( + ScoreSet.published_date.is_not(None), ScoreSet.license.has(License.short_name == "CC0") + ) ) ) ) + .execution_options(populate_existing=True) + .order_by(ExperimentSet.urn) + ) + + # Filter the stream of experiment sets to exclude experiments and experiment sets with no public, CC0-licensed score + # sets. + experiment_sets = list(filter_experiment_sets(experiment_sets_query.all())) + + # TODO To support very large data sets, we may want to use custom code for JSON-encoding an iterator. + # Issue: https://github.com/VariantEffect/mavedb-api/issues/192 + # See, for instance, https://stackoverflow.com/questions/12670395/json-encoding-very-long-iterators. + + experiment_set_views = list(map(lambda es: ExperimentSetPublicDump.from_orm(es), experiment_sets)) + + # Get a list of IDS of all the score sets included. + score_set_ids = list( + flatmap(lambda es: flatmap(lambda e: map(lambda ss: ss.id, e.score_sets), es.experiments), experiment_sets) ) - .execution_options(populate_existing=True) - .order_by(ExperimentSet.urn) -) - -# Filter the stream of experiment sets to exclude experiments and experiment sets with no public, CC0-licensed score -# sets. -experiment_sets = list(filter_experiment_sets(experiment_sets_query.all())) - -# TODO To support very large data sets, we may want to use custom code for JSON-encoding an iterator. -# Issue: https://github.com/VariantEffect/mavedb-api/issues/192 -# See, for instance, https://stackoverflow.com/questions/12670395/json-encoding-very-long-iterators. - -experiment_set_views = list(map(lambda es: ExperimentSetPublicDump.from_orm(es), experiment_sets)) - -# Get a list of IDS of all the score sets included. 
-score_set_ids = list( - flatmap(lambda es: flatmap(lambda e: map(lambda ss: ss.id, e.score_sets), es.experiments), experiment_sets) -) - -timestamp_format = "%Y%m%d%H%M%S" -zip_file_name = f"mavedb-dump.{datetime.now().strftime(timestamp_format)}.zip" - -logger.info(f"Exporting public data set metadata to {zip_file_name}/main.json") -json_data = { - "title": "MaveDB public data", - "asOf": datetime.now(timezone.utc).isoformat(), - "experimentSets": experiment_set_views, -} - -with ZipFile(zip_file_name, "w") as zipfile: - # Write metadata for all data sets to a single JSON file. - zipfile.writestr("main.json", json.dumps(jsonable_encoder(json_data))) - - # Copy the CC0 license. - zipfile.write(os.path.join(os.path.dirname(__file__), "resources/CC0_license.txt"), "LICENSE.txt") - - # Write score and count files for each score set. - num_score_sets = len(score_set_ids) - for i, score_set_id in enumerate(score_set_ids): - score_set = db.scalars(select(ScoreSet).where(ScoreSet.id == score_set_id)).one_or_none() - if score_set is not None and score_set.urn is not None: - logger.info(f"{i + 1}/{num_score_sets} Exporting variants for score set {score_set.urn}") - csv_filename_base = score_set.urn.replace(":", "-") - - csv_str = get_score_set_scores_as_csv(db, score_set) - zipfile.writestr(f"csv/{csv_filename_base}.scores.csv", csv_str) - - count_columns = score_set.dataset_columns["count_columns"] if score_set.dataset_columns else None - if count_columns and len(count_columns) > 0: - csv_str = get_score_set_counts_as_csv(db, score_set) - zipfile.writestr(f"csv/{csv_filename_base}.counts.csv", csv_str) + + timestamp_format = "%Y%m%d%H%M%S" + zip_file_name = f"mavedb-dump.{datetime.now().strftime(timestamp_format)}.zip" + + logger.info(f"Exporting public data set metadata to {zip_file_name}/main.json") + json_data = { + "title": "MaveDB public data", + "asOf": datetime.now(timezone.utc).isoformat(), + "experimentSets": experiment_set_views, + } + + with 
ZipFile(zip_file_name, "w") as zipfile: + # Write metadata for all data sets to a single JSON file. + zipfile.writestr("main.json", json.dumps(jsonable_encoder(json_data))) + + # Copy the CC0 license. + zipfile.write(os.path.join(os.path.dirname(__file__), "resources/CC0_license.txt"), "LICENSE.txt") + + # Write score and count files for each score set. + num_score_sets = len(score_set_ids) + for i, score_set_id in enumerate(score_set_ids): + score_set = db.scalars(select(ScoreSet).where(ScoreSet.id == score_set_id)).one_or_none() + if score_set is not None and score_set.urn is not None: + logger.info(f"{i + 1}/{num_score_sets} Exporting variants for score set {score_set.urn}") + csv_filename_base = score_set.urn.replace(":", "-") + + csv_str = get_score_set_scores_as_csv(db, score_set) + zipfile.writestr(f"csv/{csv_filename_base}.scores.csv", csv_str) + + count_columns = score_set.dataset_columns["count_columns"] if score_set.dataset_columns else None + if count_columns and len(count_columns) > 0: + csv_str = get_score_set_counts_as_csv(db, score_set) + zipfile.writestr(f"csv/{csv_filename_base}.counts.csv", csv_str) + + +if __name__ == "__main__": + export_public_data() diff --git a/src/mavedb/scripts/populate_mapped_variants.py b/src/mavedb/scripts/populate_mapped_variants.py new file mode 100644 index 00000000..8df46f3d --- /dev/null +++ b/src/mavedb/scripts/populate_mapped_variants.py @@ -0,0 +1,173 @@ +import logging +import click +from datetime import date +from typing import Sequence, Optional + +from sqlalchemy import cast, select +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Session + +from mavedb.data_providers.services import vrs_mapper +from mavedb.lib.logging.context import format_raised_exception_info_as_dict +from mavedb.models.enums.mapping_state import MappingState +from mavedb.models.score_set import ScoreSet +from mavedb.models.mapped_variant import MappedVariant +from mavedb.models.target_gene import TargetGene 
+from mavedb.models.variant import Variant + +from mavedb.scripts.environment import script_environment, with_database_session + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + + +def variant_from_mapping(db: Session, mapping: dict, dcd_mapping_version: str) -> MappedVariant: + variant_urn = mapping.get("mavedb_id") + variant = db.scalars(select(Variant).where(Variant.urn == variant_urn)).one() + + return MappedVariant( + variant_id=variant.id, + pre_mapped=mapping.get("pre_mapped"), + post_mapped=mapping.get("post_mapped"), + modification_date=date.today(), + mapped_date=date.today(), # since this is a one-time script, assume mapping was done today + vrs_version=mapping.get("vrs_version"), + mapping_api_version=dcd_mapping_version, + error_message=mapping.get("error_message"), + current=True, + ) + + +@script_environment.command() +@with_database_session +@click.argument("urns", nargs=-1) +@click.option("--all", help="Populate mapped variants for every score set in MaveDB.", is_flag=True) +def populate_mapped_variant_data(db: Session, urns: Sequence[Optional[str]], all: bool): + score_set_ids: Sequence[Optional[int]] + if all: + score_set_ids = db.scalars(select(ScoreSet.id)).all() + logger.info( + f"Command invoked with --all. Routine will populate mapped variant data for {len(score_set_ids)} score sets."
+ ) + else: + score_set_ids = db.scalars(select(ScoreSet.id).where(ScoreSet.urn.in_(urns))).all() + logger.info(f"Populating mapped variant data for the provided score sets ({len(urns)}).") + + vrs = vrs_mapper() + + for idx, ss_id in enumerate(score_set_ids): + if not ss_id: + continue + + score_set = db.scalar(select(ScoreSet).where(ScoreSet.id == ss_id)) + if not score_set: + logger.warning(f"Could not fetch score set with id={ss_id}.") + continue + + try: + existing_mapped_variants = ( + db.query(MappedVariant).join(Variant).join(ScoreSet).filter(ScoreSet.id == ss_id, MappedVariant.current.is_(True)).all() + ) + + for variant in existing_mapped_variants: + variant.current = False + + assert score_set.urn + logger.info(f"Mapping score set {score_set.urn}.") + mapped_scoreset = vrs.map_score_set(score_set.urn) + logger.info(f"Done mapping score set {score_set.urn}.") + + dcd_mapping_version = mapped_scoreset["dcd_mapping_version"] + mapped_scores = mapped_scoreset.get("mapped_scores") + + if not mapped_scores: + # if there are no mapped scores, the score set failed to map. + score_set.mapping_state = MappingState.failed + score_set.mapping_errors = {"error_message": mapped_scoreset.get("error_message")} + db.commit() + logger.info(f"No mapped variants available for {score_set.urn}.") + else: + computed_genomic_ref = mapped_scoreset.get("computed_genomic_reference_sequence") + mapped_genomic_ref = mapped_scoreset.get("mapped_genomic_reference_sequence") + computed_protein_ref = mapped_scoreset.get("computed_protein_reference_sequence") + mapped_protein_ref = mapped_scoreset.get("mapped_protein_reference_sequence") + + # assumes one target gene per score set, which is currently true in mavedb as of sept. 2024.
+ target_gene = db.scalars( + select(TargetGene) + .join(ScoreSet) + .where( + ScoreSet.urn == str(score_set.urn), + ) + ).one() + + excluded_pre_mapped_keys = {"sequence"} + if computed_genomic_ref and mapped_genomic_ref: + pre_mapped_metadata = computed_genomic_ref + target_gene.pre_mapped_metadata = cast( + { + "genomic": { + k: pre_mapped_metadata[k] + for k in set(list(pre_mapped_metadata.keys())) - excluded_pre_mapped_keys + } + }, + JSONB, + ) + target_gene.post_mapped_metadata = cast({"genomic": mapped_genomic_ref}, JSONB) + elif computed_protein_ref and mapped_protein_ref: + pre_mapped_metadata = computed_protein_ref + target_gene.pre_mapped_metadata = cast( + { + "protein": { + k: pre_mapped_metadata[k] + for k in set(list(pre_mapped_metadata.keys())) - excluded_pre_mapped_keys + } + }, + JSONB, + ) + target_gene.post_mapped_metadata = cast({"protein": mapped_protein_ref}, JSONB) + else: + raise ValueError(f"incomplete or inconsistent metadata for score set {score_set.urn}") + + mapped_variants = [ + variant_from_mapping(db=db, mapping=mapped_score, dcd_mapping_version=dcd_mapping_version) + for mapped_score in mapped_scores + ] + logger.debug(f"Done constructing {len(mapped_variants)} mapped variant objects.") + + num_successful_variants = len( + [variant for variant in mapped_variants if variant.post_mapped is not None] + ) + logger.debug( + f"{num_successful_variants}/{len(mapped_variants)} variants generated a post-mapped VRS object." 
+ ) + + if num_successful_variants == 0: + score_set.mapping_state = MappingState.failed + score_set.mapping_errors = {"error_message": "All variants failed to map"} + elif num_successful_variants < len(mapped_variants): + score_set.mapping_state = MappingState.incomplete + else: + score_set.mapping_state = MappingState.complete + + db.bulk_save_objects(mapped_variants) + db.commit() + logger.info(f"Done populating {len(mapped_variants)} mapped variants for {score_set.urn}.") + + except Exception as e: + logging_context = { + "mapped_score_sets": urns[:idx], + "unmapped_score_sets": urns[idx:], + } + logging_context = {**logging_context, **format_raised_exception_info_as_dict(e)} + logger.error(f"Score set {score_set.urn} failed to map.", extra=logging_context) + logger.info(f"Rolling back all changes for scoreset {score_set.urn}") + db.rollback() + + logger.info(f"Done with score set {score_set.urn}. ({idx+1}/{len(urns)}).") + + logger.info("Done populating mapped variant data.") + + +if __name__ == "__main__": + populate_mapped_variant_data() diff --git a/src/mavedb/view_models/__init__.py b/src/mavedb/view_models/__init__.py index 2a7b6d45..6d32815b 100644 --- a/src/mavedb/view_models/__init__.py +++ b/src/mavedb/view_models/__init__.py @@ -1,5 +1,6 @@ from typing import Any +from pydantic import validator from pydantic.utils import GetterDict @@ -24,3 +25,12 @@ def get(self, key: Any, default: Any = ...) -> Any: return [assc.publication for assc in pub_assc if assc.primary] else: return super().get(key, default) + + +def record_type_validator(): + return validator("record_type", allow_reuse=True, pre=True, always=True) + + +def set_record_type(cls, v): + # Record type will be set to the class name no matter the input. 
+ return cls.__name__ diff --git a/src/mavedb/view_models/access_key.py b/src/mavedb/view_models/access_key.py index c5328eb9..cf109f25 100644 --- a/src/mavedb/view_models/access_key.py +++ b/src/mavedb/view_models/access_key.py @@ -2,6 +2,7 @@ from typing import Optional from mavedb.models.enums.user_role import UserRole +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel @@ -14,11 +15,14 @@ class AccessKeyBase(BaseModel): # Properties shared by models stored in DB class SavedAccessKey(AccessKeyBase): + record_type: str = None # type: ignore + role: Optional[UserRole] + + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True - role: Optional[UserRole] - # Properties to return to non-admin clients class AccessKey(SavedAccessKey): diff --git a/src/mavedb/view_models/contributor.py b/src/mavedb/view_models/contributor.py index 0e635080..0a86dede 100644 --- a/src/mavedb/view_models/contributor.py +++ b/src/mavedb/view_models/contributor.py @@ -1,5 +1,6 @@ from typing import Optional +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel @@ -18,9 +19,12 @@ class ContributorCreate(ContributorBase): class SavedContributor(ContributorBase): """Base class for contributor view models representing saved records.""" + record_type: str = None # type: ignore given_name: Optional[str] family_name: Optional[str] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/doi_identifier.py b/src/mavedb/view_models/doi_identifier.py index 21d5d1f3..8d664e06 100644 --- a/src/mavedb/view_models/doi_identifier.py +++ b/src/mavedb/view_models/doi_identifier.py @@ -1,6 +1,7 @@ import idutils from mavedb.lib.validation.exceptions import ValidationError +from mavedb.view_models import record_type_validator, set_record_type from 
mavedb.view_models.base.base import BaseModel, validator @@ -19,8 +20,11 @@ def must_be_valid_doi(cls, v): # Properties shared by models stored in DB class SavedDoiIdentifier(DoiIdentifierBase): id: int + record_type: str = None # type: ignore url: str + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/experiment.py b/src/mavedb/view_models/experiment.py index 4caaf54c..4d639e95 100644 --- a/src/mavedb/view_models/experiment.py +++ b/src/mavedb/view_models/experiment.py @@ -3,7 +3,7 @@ from mavedb.lib.validation.exceptions import ValidationError from mavedb.lib.validation.utilities import is_null -from mavedb.view_models import PublicationIdentifiersGetter +from mavedb.view_models import PublicationIdentifiersGetter, record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator from mavedb.view_models.contributor import Contributor, ContributorCreate from mavedb.view_models.doi_identifier import ( @@ -91,6 +91,7 @@ class ExperimentUpdate(ExperimentModify): # Properties shared by models stored in DB class SavedExperiment(ExperimentBase): + record_type: str = None # type: ignore urn: str created_by: SavedUser modified_by: SavedUser @@ -105,6 +106,8 @@ class SavedExperiment(ExperimentBase): contributors: list[Contributor] keywords: Sequence[SavedExperimentControlledKeyword] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True getter_dict = ExperimentGetter diff --git a/src/mavedb/view_models/experiment_controlled_keyword.py b/src/mavedb/view_models/experiment_controlled_keyword.py index d3385d8b..39934211 100644 --- a/src/mavedb/view_models/experiment_controlled_keyword.py +++ b/src/mavedb/view_models/experiment_controlled_keyword.py @@ -3,7 +3,7 @@ from pydantic import root_validator from mavedb.lib.validation import keywords -from mavedb.view_models import keyword +from mavedb.view_models import 
keyword, record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel @@ -40,6 +40,10 @@ class ExperimentControlledKeywordUpdate(ExperimentControlledKeywordBase): class SavedExperimentControlledKeyword(ExperimentControlledKeywordBase): """Base class for keyword view models representing saved records.""" + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/experiment_set.py b/src/mavedb/view_models/experiment_set.py index d53167f9..f2271df0 100644 --- a/src/mavedb/view_models/experiment_set.py +++ b/src/mavedb/view_models/experiment_set.py @@ -3,6 +3,7 @@ from pydantic.types import Optional +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel from mavedb.view_models.contributor import Contributor from mavedb.view_models.experiment import Experiment, ExperimentPublicDump, SavedExperiment @@ -25,6 +26,7 @@ class ExperimentSetUpdate(ExperimentSetBase): # Properties shared by models stored in DB class SavedExperimentSet(ExperimentSetBase): id: int + record_type: str = None # type: ignore experiments: Sequence[SavedExperiment] created_by: Optional[SavedUser] modified_by: Optional[SavedUser] @@ -32,6 +34,8 @@ class SavedExperimentSet(ExperimentSetBase): modification_date: date contributors: list[Contributor] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/external_gene_identifier.py b/src/mavedb/view_models/external_gene_identifier.py index 6d4aee87..b9917a26 100644 --- a/src/mavedb/view_models/external_gene_identifier.py +++ b/src/mavedb/view_models/external_gene_identifier.py @@ -1,6 +1,7 @@ from typing import Optional from mavedb.lib.validation import identifier as identifier_validator +from mavedb.view_models import record_type_validator, set_record_type from 
mavedb.view_models.base.base import BaseModel, validator @@ -27,10 +28,13 @@ def validate_identifier(cls, field_value, values, field, config): # Properties shared by models stored in DB class SavedExternalGeneIdentifier(ExternalGeneIdentifierBase): + record_type: str = None # type: ignore db_version: Optional[str] url: Optional[str] reference_html: Optional[str] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/external_gene_identifier_offset.py b/src/mavedb/view_models/external_gene_identifier_offset.py index 71f260b4..bb779b9a 100644 --- a/src/mavedb/view_models/external_gene_identifier_offset.py +++ b/src/mavedb/view_models/external_gene_identifier_offset.py @@ -1,4 +1,4 @@ -from mavedb.view_models import external_gene_identifier +from mavedb.view_models import external_gene_identifier, record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator @@ -19,8 +19,11 @@ def validate_offset(cls, v): # Properties shared by models stored in DB class SavedExternalGeneIdentifierOffset(ExternalGeneIdentifierOffsetBase): + record_type: str = None # type: ignore identifier: external_gene_identifier.SavedExternalGeneIdentifier + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/keyword.py b/src/mavedb/view_models/keyword.py index 74133933..df93c6df 100644 --- a/src/mavedb/view_models/keyword.py +++ b/src/mavedb/view_models/keyword.py @@ -4,6 +4,7 @@ from typing import Optional from mavedb.lib.validation import keywords +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator @@ -46,6 +47,10 @@ class KeywordUpdate(KeywordBase): class SavedKeyword(KeywordBase): """Base class for keyword view models representing saved records.""" + record_type: str = None # type: ignore + + _record_type_factory = 
record_type_validator()(set_record_type) + class Config: orm_mode = True arbitrary_types_allowed = True diff --git a/src/mavedb/view_models/license.py b/src/mavedb/view_models/license.py index 5c0abef5..85d92a53 100644 --- a/src/mavedb/view_models/license.py +++ b/src/mavedb/view_models/license.py @@ -1,6 +1,7 @@ from datetime import date from typing import Optional +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel @@ -9,6 +10,7 @@ class LicenseBase(BaseModel): long_name: str short_name: str + active: bool link: Optional[str] version: Optional[str] @@ -18,6 +20,9 @@ class SavedLicense(LicenseBase): """Base class for license view models representing saved records.""" id: int + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) class Config: orm_mode = True diff --git a/src/mavedb/view_models/mapped_variant.py b/src/mavedb/view_models/mapped_variant.py index 9406519c..397084de 100644 --- a/src/mavedb/view_models/mapped_variant.py +++ b/src/mavedb/view_models/mapped_variant.py @@ -1,7 +1,8 @@ from datetime import date from typing import Any, Optional -from .base.base import BaseModel +from mavedb.view_models import record_type_validator, set_record_type +from mavedb.view_models.base.base import BaseModel class MappedVariantBase(BaseModel): @@ -27,6 +28,9 @@ class MappedVariantUpdate(MappedVariantBase): # Properties shared by models stored in DB class SavedMappedVariant(MappedVariantBase): id: int + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) class Config: orm_mode = True diff --git a/src/mavedb/view_models/orcid.py b/src/mavedb/view_models/orcid.py index 1bccc143..f0ddb403 100644 --- a/src/mavedb/view_models/orcid.py +++ b/src/mavedb/view_models/orcid.py @@ -1,5 +1,6 @@ from typing import Optional +from mavedb.view_models import record_type_validator, set_record_type from 
mavedb.view_models.base.base import BaseModel @@ -16,6 +17,9 @@ class OrcidAuthTokenResponse(BaseModel): class OrcidUser(BaseModel): + record_type: str = None # type: ignore orcid_id: str given_name: Optional[str] family_name: Optional[str] + + _record_type_factory = record_type_validator()(set_record_type) diff --git a/src/mavedb/view_models/publication_identifier.py b/src/mavedb/view_models/publication_identifier.py index 032d81aa..40d0970e 100644 --- a/src/mavedb/view_models/publication_identifier.py +++ b/src/mavedb/view_models/publication_identifier.py @@ -3,6 +3,7 @@ from mavedb.lib.identifiers import PublicationAuthors from mavedb.lib.validation.publication import validate_db_name, validate_publication +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator logger = logging.getLogger(__name__) @@ -27,6 +28,7 @@ def validate_publication_db(cls, v): # Properties of external publication identifiers class ExternalPublicationIdentifier(PublicationIdentifierBase): + record_type: str = None # type: ignore title: str authors: list[PublicationAuthors] @@ -37,6 +39,8 @@ class ExternalPublicationIdentifier(PublicationIdentifierBase): url: Optional[str] reference_html: Optional[str] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/raw_read_identifier.py b/src/mavedb/view_models/raw_read_identifier.py index b8133d79..154a264a 100644 --- a/src/mavedb/view_models/raw_read_identifier.py +++ b/src/mavedb/view_models/raw_read_identifier.py @@ -1,3 +1,4 @@ +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel @@ -12,8 +13,11 @@ class RawReadIdentifierCreate(RawReadIdentifierBase): # Properties shared by models stored in DB class SavedRawReadIdentifier(RawReadIdentifierBase): id: int + record_type: str = None # type: ignore url: str + 
_record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/score_set.py b/src/mavedb/view_models/score_set.py index 0b201113..6c0bfc1e 100644 --- a/src/mavedb/view_models/score_set.py +++ b/src/mavedb/view_models/score_set.py @@ -13,7 +13,7 @@ from mavedb.lib.validation.utilities import inf_or_float, is_null from mavedb.models.enums.mapping_state import MappingState from mavedb.models.enums.processing_state import ProcessingState -from mavedb.view_models import PublicationIdentifiersGetter +from mavedb.view_models import PublicationIdentifiersGetter, record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator from mavedb.view_models.contributor import Contributor, ContributorCreate from mavedb.view_models.doi_identifier import ( @@ -254,7 +254,6 @@ def wild_type_score_in_normal_range(cls, field_value: Optional[ScoreRanges]): range_model.range for range_model in field_value.ranges if range_model.classification == "normal" ] for range in normal_ranges: - print(range) if field_value.wt_score >= inf_or_float(range[0], lower=True) and field_value.wt_score < inf_or_float( range[1], lower=False ): @@ -342,6 +341,9 @@ class ShortScoreSet(BaseModel): modification_date: date target_genes: list[ShortTargetGene] private: bool + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) class Config: orm_mode = True @@ -351,6 +353,9 @@ class Config: class ShorterScoreSet(BaseModel): urn: str + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) class Config: orm_mode = True @@ -361,6 +366,7 @@ class Config: class SavedScoreSet(ScoreSetBase): """Base class for score set view models representing saved records.""" + record_type: str = None # type: ignore urn: str num_variants: int license: ShortLicense @@ -382,6 +388,8 @@ class SavedScoreSet(ScoreSetBase): 
contributors: list[Contributor] score_ranges: Optional[ScoreRanges] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True arbitrary_types_allowed = True diff --git a/src/mavedb/view_models/target_accession.py b/src/mavedb/view_models/target_accession.py index f3ab78a1..bf78ae25 100644 --- a/src/mavedb/view_models/target_accession.py +++ b/src/mavedb/view_models/target_accession.py @@ -1,6 +1,7 @@ from datetime import date from typing import Optional +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator @@ -31,6 +32,10 @@ class TargetAccessionUpdate(TargetAccessionModify): # Properties shared by models stored in DB class SavedTargetAccession(TargetAccessionBase): + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True arbitrary_types_allowed = True diff --git a/src/mavedb/view_models/target_gene.py b/src/mavedb/view_models/target_gene.py index c69f659f..d74a21e7 100644 --- a/src/mavedb/view_models/target_gene.py +++ b/src/mavedb/view_models/target_gene.py @@ -4,8 +4,8 @@ from pydantic import root_validator from pydantic.utils import GetterDict -from mavedb.lib.validation import target -from mavedb.view_models import external_gene_identifier_offset +from mavedb.models.enums.target_category import TargetCategory +from mavedb.view_models import external_gene_identifier_offset, record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator from mavedb.view_models.target_accession import SavedTargetAccession, TargetAccession, TargetAccessionCreate from mavedb.view_models.target_sequence import ( @@ -40,7 +40,7 @@ class TargetGeneBase(BaseModel): """Base class for target gene view models.""" name: str - category: str + category: TargetCategory external_identifiers: 
Sequence[external_gene_identifier_offset.ExternalGeneIdentifierOffsetBase] class Config: @@ -48,10 +48,7 @@ class Config: class TargetGeneModify(TargetGeneBase): - @validator("category") - def validate_category(cls, v): - target.validate_target_category(v) - return v + pass class TargetGeneCreate(TargetGeneModify): @@ -82,10 +79,13 @@ class SavedTargetGene(TargetGeneBase): """Base class for target gene view models representing saved records.""" id: int + record_type: str = None # type: ignore target_sequence: Optional[SavedTargetSequence] target_accession: Optional[SavedTargetAccession] external_identifiers: Sequence[external_gene_identifier_offset.SavedExternalGeneIdentifierOffset] + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True arbitrary_types_allowed = True diff --git a/src/mavedb/view_models/target_sequence.py b/src/mavedb/view_models/target_sequence.py index a56325cc..3d20733f 100644 --- a/src/mavedb/view_models/target_sequence.py +++ b/src/mavedb/view_models/target_sequence.py @@ -5,6 +5,7 @@ from mavedb.lib.validation import target from mavedb.lib.validation.exceptions import ValidationError +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator from mavedb.view_models.taxonomy import AdminTaxonomy, SavedTaxonomy, Taxonomy, TaxonomyCreate @@ -66,8 +67,11 @@ class TargetSequenceUpdate(TargetSequenceModify): # Properties shared by models stored in DB class SavedTargetSequence(TargetSequenceBase): + record_type: str = None # type: ignore taxonomy: SavedTaxonomy + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True arbitrary_types_allowed = True diff --git a/src/mavedb/view_models/taxonomy.py b/src/mavedb/view_models/taxonomy.py index ed4c23e8..c3fe79c4 100644 --- a/src/mavedb/view_models/taxonomy.py +++ b/src/mavedb/view_models/taxonomy.py @@ -2,7 +2,8 @@ from pydantic.types import Optional 
-from .base.base import BaseModel +from mavedb.view_models import record_type_validator, set_record_type +from mavedb.view_models.base.base import BaseModel class TaxonomyBase(BaseModel): @@ -26,8 +27,11 @@ class TaxonomyUpdate(TaxonomyBase): # Properties shared by models stored in DB class SavedTaxonomy(TaxonomyBase): id: int + record_type: str = None # type: ignore url: str + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/user.py b/src/mavedb/view_models/user.py index 10818eba..81fba3c1 100644 --- a/src/mavedb/view_models/user.py +++ b/src/mavedb/view_models/user.py @@ -5,6 +5,7 @@ from mavedb.lib.validation.exceptions import ValidationError from mavedb.models.enums.user_role import UserRole +from mavedb.view_models import record_type_validator, set_record_type from mavedb.view_models.base.base import BaseModel, validator @@ -50,6 +51,10 @@ class AdminUserUpdate(CurrentUserUpdate): class SavedUser(UserBase): """Base class for user view models representing saved records.""" + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) + class Config: orm_mode = True diff --git a/src/mavedb/view_models/variant.py b/src/mavedb/view_models/variant.py index fa400667..830bdd5c 100644 --- a/src/mavedb/view_models/variant.py +++ b/src/mavedb/view_models/variant.py @@ -3,7 +3,8 @@ from pydantic.types import Optional -from .base.base import BaseModel +from mavedb.view_models import record_type_validator, set_record_type +from mavedb.view_models.base.base import BaseModel class VariantBase(BaseModel): @@ -28,6 +29,9 @@ class VariantUpdate(VariantBase): # Properties shared by models stored in DB class VariantInDbBase(VariantBase): id: int + record_type: str = None # type: ignore + + _record_type_factory = record_type_validator()(set_record_type) class Config: orm_mode = True diff --git a/tests/helpers/constants.py b/tests/helpers/constants.py 
index 580294b6..c6c88269 100644 --- a/tests/helpers/constants.py +++ b/tests/helpers/constants.py @@ -13,6 +13,28 @@ VALID_ACCESSION = "NM_001637.3" VALID_GENE = "BRCA1" +SAVED_PUBMED_PUBLICATION = { + "recordType": "PublicationIdentifier", + "identifier": "20711194", + "dbName": "PubMed", + "title": "None", + "authors": [], + "abstract": "test", + "doi": "test", + "publicationYear": 1999, + "publicationJournal": "test", + "url": "http://www.ncbi.nlm.nih.gov/pubmed/20711194", + "referenceHtml": ". None. test. 1999; (Unknown volume):(Unknown pages). test", + "id": 1, +} + +SAVED_DOI_IDENTIFIER = { + "recordType": "DoiIdentifier", + "identifier": TEST_CROSSREF_IDENTIFIER, + "url": f"https://doi.org/{TEST_CROSSREF_IDENTIFIER}", + "id": 1, +} + TEST_USER = { "username": "0000-1111-2222-3333", "first_name": "First", @@ -24,6 +46,19 @@ "is_first_login": True, } +CONTRIBUTOR = { + "orcid_id": TEST_USER["username"], + "given_name": TEST_USER["first_name"], + "family_name": TEST_USER["last_name"], +} + +SAVED_CONTRIBUTOR = { + "recordType": "Contributor", + "orcidId": TEST_USER["username"], + "givenName": TEST_USER["first_name"], + "familyName": TEST_USER["last_name"], +} + TEST_USER_DECODED_JWT = { "sub": TEST_USER["username"], "given_name": TEST_USER["first_name"], @@ -41,6 +76,19 @@ "is_first_login": True, } +EXTRA_CONTRIBUTOR = { + "orcid_id": EXTRA_USER["username"], + "given_name": EXTRA_USER["first_name"], + "family_name": EXTRA_USER["last_name"], +} + +SAVED_EXTRA_CONTRIBUTOR = { + "recordType": "Contributor", + "orcidId": EXTRA_USER["username"], + "givenName": EXTRA_USER["first_name"], + "familyName": EXTRA_USER["last_name"], +} + EXTRA_USER_DECODED_JWT = { "sub": EXTRA_USER["username"], "given_name": EXTRA_USER["first_name"], @@ -167,16 +215,19 @@ } TEST_MINIMAL_EXPERIMENT_RESPONSE = { + "recordType": "Experiment", "title": "Test Experiment Title", "shortDescription": "Test experiment", "abstractText": "Abstract", "methodText": "Methods", "createdBy": { + 
"recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "modifiedBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], @@ -196,16 +247,19 @@ } TEST_EXPERIMENT_WITH_KEYWORD_RESPONSE = { + "recordType": "Experiment", "title": "Test Experiment Title", "shortDescription": "Test experiment", "abstractText": "Abstract", "methodText": "Methods", "createdBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "modifiedBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], @@ -216,6 +270,7 @@ "contributors": [], "keywords": [ { + "recordType": "ExperimentControlledKeyword", "keyword": {"key": "Delivery method", "value": "Other", "special": False, "description": "Description"}, "description": "Details of delivery method", }, @@ -230,16 +285,19 @@ } TEST_EXPERIMENT_WITH_KEYWORD_HAS_DUPLICATE_OTHERS_RESPONSE = { + "recordType": "Experiment", "title": "Test Experiment Title", "shortDescription": "Test experiment", "abstractText": "Abstract", "methodText": "Methods", "createdBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "modifiedBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], @@ -250,6 +308,7 @@ "contributors": [], "keywords": [ { + "recordType": "ExperimentControlledKeyword", "keyword": { "key": "Variant Library Creation Method", "value": "Other", @@ -259,6 +318,7 @@ "description": "Description", }, { + "recordType": "ExperimentControlledKeyword", "keyword": {"key": "Delivery method", "value": "Other", "special": False, "description": "Description"}, "description": 
"Description", }, @@ -291,6 +351,57 @@ "text": "Don't be evil.", "link": "localhost", "version": "1.0", + "active": True, +} + +SAVED_SHORT_TEST_LICENSE = { + "recordType": "ShortLicense", + "id": TEST_LICENSE["id"], + "shortName": TEST_LICENSE["short_name"], + "longName": TEST_LICENSE["long_name"], + "link": TEST_LICENSE["link"], + "version": TEST_LICENSE["version"], + "active": TEST_LICENSE["active"], +} + +EXTRA_LICENSE = { + "id": 2, + "short_name": "Extra", + "long_name": "License", + "text": "Don't be tooooo evil.", + "link": "localhost", + "version": "1.0", + "active": True, +} + +SAVED_SHORT_EXTRA_LICENSE = { + "recordType": "ShortLicense", + "id": EXTRA_LICENSE["id"], + "shortName": EXTRA_LICENSE["short_name"], + "longName": EXTRA_LICENSE["long_name"], + "link": EXTRA_LICENSE["link"], + "version": EXTRA_LICENSE["version"], + "active": EXTRA_LICENSE["active"], +} + +TEST_INACTIVE_LICENSE = { + "id": 3, + "short_name": "Long", + "long_name": "Short", + "text": "Be evil.", + "link": "localhost", + "version": "1.0", + "active": False, +} + +SAVED_SHORT_INACTIVE_LICENSE = { + "recordType": "ShortLicense", + "id": TEST_INACTIVE_LICENSE["id"], + "shortName": TEST_INACTIVE_LICENSE["short_name"], + "longName": TEST_INACTIVE_LICENSE["long_name"], + "link": TEST_INACTIVE_LICENSE["link"], + "version": TEST_INACTIVE_LICENSE["version"], + "active": TEST_INACTIVE_LICENSE["active"], } TEST_SEQ_SCORESET = { @@ -301,7 +412,7 @@ "target_genes": [ { "name": "TEST1", - "category": "Protein coding", + "category": "protein_coding", "external_identifiers": [], "target_sequence": { "sequence_type": "dna", @@ -327,7 +438,7 @@ "targetGenes": [ { "name": "TEST1", - "category": "Protein coding", + "category": "protein_coding", "externalIdentifiers": [], "targetSequence": { "sequenceType": "dna", @@ -348,35 +459,44 @@ } TEST_MINIMAL_SEQ_SCORESET_RESPONSE = { + "recordType": "ScoreSet", "title": "Test Score Set Title", "shortDescription": "Test score set", "abstractText": "Abstract", 
"methodText": "Methods", "createdBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "modifiedBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "creationDate": date.today().isoformat(), "modificationDate": date.today().isoformat(), - "license": {camelize(k): v for k, v in TEST_LICENSE.items() if k not in ("text",)}, + "license": { + "recordType": "ShortLicense", + **{camelize(k): v for k, v in TEST_LICENSE.items() if k not in ("text",)}, + }, "numVariants": 0, "targetGenes": [ { + "recordType": "TargetGene", "name": "TEST1", - "category": "Protein coding", + "category": "protein_coding", "externalIdentifiers": [], "id": 1, "targetSequence": { + "recordType": "TargetSequence", "sequenceType": "dna", "sequence": "ACGTTT", "label": "TEST1", "taxonomy": { + "recordType": "Taxonomy", "taxId": TEST_TAXONOMY["tax_id"], "organismName": TEST_TAXONOMY["organism_name"], "commonName": TEST_TAXONOMY["common_name"], @@ -413,7 +533,7 @@ "targetGenes": [ { "name": "TEST2", - "category": "Protein coding", + "category": "protein_coding", "externalIdentifiers": [], "targetAccession": {"accession": VALID_ACCESSION, "assembly": "GRCh37", "gene": VALID_GENE}, } @@ -428,7 +548,7 @@ "target_genes": [ { "name": "TEST2", - "category": "Protein coding", + "category": "protein_coding", "external_identifiers": [], "target_accession": {"accession": VALID_ACCESSION, "assembly": "GRCh37", "gene": VALID_GENE}, } @@ -436,30 +556,43 @@ } TEST_MINIMAL_ACC_SCORESET_RESPONSE = { + "recordType": "ScoreSet", "title": "Test Score Set Acc Title", "shortDescription": "Test accession score set", "abstractText": "Abstract", "methodText": "Methods", "createdBy": { + "recordType": "User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "modifiedBy": { + "recordType": 
"User", "firstName": TEST_USER["first_name"], "lastName": TEST_USER["last_name"], "orcidId": TEST_USER["username"], }, "creationDate": date.today().isoformat(), "modificationDate": date.today().isoformat(), - "license": {camelize(k): v for k, v in TEST_LICENSE.items() if k not in ("text",)}, + "license": { + "recordType": "ShortLicense", + **{camelize(k): v for k, v in TEST_LICENSE.items() if k not in ("text",)}, + }, "numVariants": 0, "targetGenes": [ { + "recordType": "TargetGene", "name": "TEST2", - "category": "Protein coding", + "id": 2, + "category": "protein_coding", "externalIdentifiers": [], - "targetAccession": {"accession": VALID_ACCESSION, "assembly": "GRCh37", "gene": VALID_GENE}, + "targetAccession": { + "recordType": "TargetAccession", + "accession": VALID_ACCESSION, + "assembly": "GRCh37", + "gene": VALID_GENE, + }, } ], "metaAnalyzesScoreSetUrns": [], @@ -515,3 +648,20 @@ "dcd_mapping_version": "pytest.0.0", "mapped_date_utc": datetime.isoformat(datetime.now()), } + + +TEST_SCORESET_RANGE = { + "wt_score": 1.0, + "ranges": [ + {"label": "test1", "classification": "normal", "range": (0, 2.0)}, + {"label": "test2", "classification": "abnormal", "range": (-2.0, 0)}, + ], +} + +TEST_SAVED_SCORESET_RANGE = { + "wtScore": 1.0, + "ranges": [ + {"label": "test1", "classification": "normal", "range": [0.0, 2.0]}, + {"label": "test2", "classification": "abnormal", "range": [-2.0, 0.0]}, + ], +} diff --git a/tests/helpers/util.py b/tests/helpers/util.py index cda39c99..108ec983 100644 --- a/tests/helpers/util.py +++ b/tests/helpers/util.py @@ -5,12 +5,14 @@ import jsonschema from arq import ArqRedis from sqlalchemy import select +from sqlalchemy.exc import NoResultFound from mavedb.lib.score_sets import columns_for_dataset, create_variants, create_variants_data, csv_data_to_df from mavedb.lib.validation.dataframe import validate_and_standardize_dataframe_pair from mavedb.models.contributor import Contributor from mavedb.models.enums.processing_state import 
ProcessingState from mavedb.models.score_set import ScoreSet as ScoreSetDbModel +from mavedb.models.license import License from mavedb.models.user import User from mavedb.view_models.experiment import Experiment, ExperimentCreate from mavedb.view_models.score_set import ScoreSet, ScoreSetCreate @@ -27,8 +29,13 @@ def add_contributor(db, urn, model, orcid_id: str, given_name: str, family_name: """Without making an API call, add a new contributor to the record (experiment or score set) with given urn and model.""" item = db.query(model).filter(model.urn == urn).one_or_none() assert item is not None - contributor = Contributor(orcid_id=orcid_id, given_name=given_name, family_name=family_name) - db.add(contributor) + + try: + contributor = db.execute(select(Contributor).where(Contributor.orcid_id == orcid_id)).one() + except NoResultFound: + contributor = Contributor(orcid_id=orcid_id, given_name=given_name, family_name=family_name) + db.add(contributor) + item.contributors = [contributor] db.add(item) db.commit() @@ -46,6 +53,17 @@ def change_ownership(db, urn, model): db.commit() +def change_to_inactive_license(db, urn): + """Change the license of the score set with given urn to an inactive license.""" + item = db.query(ScoreSetDbModel).filter(ScoreSetDbModel.urn == urn).one_or_none() + assert item is not None + license = db.query(License).filter(License.active.is_(False)).first() + assert license is not None + item.license_id = license.id + db.add(item) + db.commit() + + def create_experiment(client, update=None): experiment_payload = deepcopy(TEST_MINIMAL_EXPERIMENT) if update is not None: @@ -214,3 +232,16 @@ def mark_user_inactive(session, username): async def awaitable_exception(): return Exception() + + +def update_expected_response_for_created_resources(expected_response, created_experiment, created_score_set): + expected_response.update({"urn": created_score_set["urn"]}) + expected_response["experiment"].update( + { + "urn": created_experiment["urn"], + 
"experimentSetUrn": created_experiment["experimentSetUrn"], + "scoreSetUrns": [created_score_set["urn"]], + } + ) + + return expected_response diff --git a/tests/lib/conftest.py b/tests/lib/conftest.py index 94860c35..076dac4b 100644 --- a/tests/lib/conftest.py +++ b/tests/lib/conftest.py @@ -5,7 +5,14 @@ from mavedb.models.role import Role from mavedb.models.taxonomy import Taxonomy from mavedb.models.user import User -from tests.helpers.constants import ADMIN_USER, EXTRA_USER, TEST_LICENSE, TEST_TAXONOMY, TEST_USER +from tests.helpers.constants import ( + ADMIN_USER, + EXTRA_USER, + TEST_LICENSE, + TEST_INACTIVE_LICENSE, + TEST_TAXONOMY, + TEST_USER, +) @pytest.fixture @@ -20,4 +27,5 @@ def setup_lib_db(session): db.add(User(**ADMIN_USER, role_objs=[Role(name=UserRole.admin)])) db.add(Taxonomy(**TEST_TAXONOMY)) db.add(License(**TEST_LICENSE)) + db.add(License(**TEST_INACTIVE_LICENSE)) db.commit() diff --git a/tests/routers/conftest.py b/tests/routers/conftest.py index 5317ee0c..f16ff93b 100644 --- a/tests/routers/conftest.py +++ b/tests/routers/conftest.py @@ -6,6 +6,7 @@ import pytest from mavedb.models.controlled_keyword import ControlledKeyword +from mavedb.models.contributor import Contributor from mavedb.models.enums.user_role import UserRole from mavedb.models.license import License from mavedb.models.role import Role @@ -14,9 +15,12 @@ from tests.helpers.constants import ( ADMIN_USER, EXTRA_USER, + EXTRA_CONTRIBUTOR, TEST_CDOT_TRANSCRIPT, TEST_DB_KEYWORDS, TEST_LICENSE, + TEST_INACTIVE_LICENSE, + EXTRA_LICENSE, TEST_TAXONOMY, TEST_USER, ) @@ -41,6 +45,9 @@ def setup_router_db(session): db.add(User(**ADMIN_USER, role_objs=[Role(name=UserRole.admin)])) db.add(Taxonomy(**TEST_TAXONOMY)) db.add(License(**TEST_LICENSE)) + db.add(License(**TEST_INACTIVE_LICENSE)) + db.add(License(**EXTRA_LICENSE)) + db.add(Contributor(**EXTRA_CONTRIBUTOR)) db.bulk_save_objects([ControlledKeyword(**keyword_obj) for keyword_obj in TEST_DB_KEYWORDS]) db.commit() diff --git 
a/tests/routers/data/counts_utf8_encoded.csv b/tests/routers/data/counts_utf8_encoded.csv new file mode 100644 index 00000000..ed263ef2 --- /dev/null +++ b/tests/routers/data/counts_utf8_encoded.csv @@ -0,0 +1,4 @@ +hgvs_nt,hgvs_pro,c_0,c_1 +c.1A>T,p.Thr1Ser,10,20 +c.2C>T,p.Thr1Met,8,8 +c.6T>A,p.Phe2Leu,90,2 \ No newline at end of file diff --git a/tests/routers/data/scores_utf8_encoded.csv b/tests/routers/data/scores_utf8_encoded.csv new file mode 100644 index 00000000..f9626844 --- /dev/null +++ b/tests/routers/data/scores_utf8_encoded.csv @@ -0,0 +1,4 @@ +hgvs_nt,hgvs_pro,score +c.1A>T,p.Thr1Ser,0.3 +c.2C>T,p.Thr1Met,0 +c.6T>A,p.Phe2Leu,-1.65 \ No newline at end of file diff --git a/tests/routers/test_experiments.py b/tests/routers/test_experiments.py index fa390bfc..51dde99e 100644 --- a/tests/routers/test_experiments.py +++ b/tests/routers/test_experiments.py @@ -74,6 +74,7 @@ def test_create_experiment_with_contributor(client, setup_router_db): expected_response.update({"urn": response_data["urn"], "experimentSetUrn": response_data["experimentSetUrn"]}) expected_response["contributors"] = [ { + "recordType": "Contributor", "orcidId": TEST_ORCID_ID, "givenName": "ORCID", "familyName": "User", @@ -723,6 +724,7 @@ def test_create_experiment_with_new_primary_pubmed_publication(client, setup_rou "identifier", "title", "url", + "recordType", "referenceHtml", "publicationJournal", "publicationYear", @@ -753,6 +755,7 @@ def test_create_experiment_with_new_primary_preprint_publication(client, setup_r "identifier", "title", "url", + "recordType", "referenceHtml", "doi", "publicationJournal", @@ -783,6 +786,7 @@ def test_create_experiment_with_new_primary_crossref_publication(client, setup_r "identifier", "title", "url", + "recordType", "referenceHtml", "doi", "publicationJournal", diff --git a/tests/routers/test_licenses.py b/tests/routers/test_licenses.py new file mode 100644 index 00000000..97c487a3 --- /dev/null +++ b/tests/routers/test_licenses.py @@ -0,0 +1,47 @@ 
+import pytest + +from tests.helpers.constants import TEST_LICENSE +from tests.helpers.dependency_overrider import DependencyOverrider + + +@pytest.mark.parametrize("user_overrides", [None, "anonymous_app_overrides", "admin_app_overrides"]) +def test_can_list_licenses_as_any_user_class(setup_router_db, client, user_overrides, request): + if user_overrides is not None: + dep_overrides = request.getfixturevalue(user_overrides) + with DependencyOverrider(dep_overrides): + response = client.get("/api/v1/licenses/") + else: + response = client.get("/api/v1/licenses/") + + assert response.status_code == 200 + response_value = response.json() + assert len(response_value) == 3 + + +@pytest.mark.parametrize("user_overrides", [None, "anonymous_app_overrides", "admin_app_overrides"]) +def test_can_list_active_licenses_as_any_user_class(setup_router_db, client, user_overrides, request): + if user_overrides is not None: + dep_overrides = request.getfixturevalue(user_overrides) + with DependencyOverrider(dep_overrides): + response = client.get("/api/v1/licenses/active") + else: + response = client.get("/api/v1/licenses/active") + + assert response.status_code == 200 + response_value = response.json() + assert len(response_value) == 2 + license_state = [_license["active"] for _license in response_value] + assert all(license_state) + + +def test_can_fetch_arbitrary_license(setup_router_db, client): + response = client.get("/api/v1/licenses/1") + + assert response.status_code == 200 + response_value = response.json() + assert response_value["text"] == TEST_LICENSE["text"] + + +def test_cannot_fetch_nonexistent_license(setup_router_db, client): + response = client.get("/api/v1/licenses/100") + assert response.status_code == 404 diff --git a/tests/routers/test_score_set.py b/tests/routers/test_score_set.py index 44207f97..1b64683f 100644 --- a/tests/routers/test_score_set.py +++ b/tests/routers/test_score_set.py @@ -4,32 +4,54 @@ from unittest.mock import patch import jsonschema +import 
pytest from arq import ArqRedis +from humps import camelize +from sqlalchemy import select -from mavedb.lib.validation.urn_re import MAVEDB_TMP_URN_RE +from mavedb.lib.validation.urn_re import MAVEDB_TMP_URN_RE, MAVEDB_SCORE_SET_URN_RE, MAVEDB_EXPERIMENT_URN_RE from mavedb.models.enums.processing_state import ProcessingState from mavedb.models.experiment import Experiment as ExperimentDbModel from mavedb.models.score_set import ScoreSet as ScoreSetDbModel +from mavedb.models.variant import Variant as VariantDbModel from mavedb.view_models.orcid import OrcidUser from mavedb.view_models.score_set import ScoreSet, ScoreSetCreate from tests.helpers.constants import ( EXTRA_USER, + EXTRA_LICENSE, + TEST_CROSSREF_IDENTIFIER, TEST_MINIMAL_ACC_SCORESET, TEST_MINIMAL_SEQ_SCORESET, TEST_MINIMAL_SEQ_SCORESET_RESPONSE, + TEST_PUBMED_IDENTIFIER, TEST_ORCID_ID, + TEST_SCORESET_RANGE, + TEST_SAVED_SCORESET_RANGE, + TEST_MINIMAL_ACC_SCORESET_RESPONSE, TEST_USER, + TEST_INACTIVE_LICENSE, + SAVED_DOI_IDENTIFIER, + SAVED_EXTRA_CONTRIBUTOR, + SAVED_PUBMED_PUBLICATION, + SAVED_SHORT_EXTRA_LICENSE, ) from tests.helpers.dependency_overrider import DependencyOverrider from tests.helpers.util import ( add_contributor, change_ownership, + change_to_inactive_license, create_experiment, create_seq_score_set, create_seq_score_set_with_variants, + update_expected_response_for_created_resources, ) +######################################################################################################################## +# Score set schemas +######################################################################################################################## + + def test_TEST_MINIMAL_SEQ_SCORESET_is_valid(): jsonschema.validate(instance=TEST_MINIMAL_SEQ_SCORESET, schema=ScoreSetCreate.schema()) @@ -38,27 +60,31 @@ def test_TEST_MINIMAL_ACC_SCORESET_is_valid(): jsonschema.validate(instance=TEST_MINIMAL_ACC_SCORESET, schema=ScoreSetCreate.schema()) 
+######################################################################################################################## +# Score set creation +######################################################################################################################## + + def test_create_minimal_score_set(client, setup_router_db): experiment = create_experiment(client) score_set_post_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) score_set_post_payload["experimentUrn"] = experiment["urn"] + response = client.post("/api/v1/score-sets/", json=score_set_post_payload) assert response.status_code == 200 response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) assert isinstance(MAVEDB_TMP_URN_RE.fullmatch(response_data["urn"]), re.Match) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) - expected_response.update({"urn": response_data["urn"]}) - expected_response["experiment"].update( - { - "urn": experiment["urn"], - "experimentSetUrn": experiment["experimentSetUrn"], - "scoreSetUrns": [response_data["urn"]], - } + + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, response_data ) + assert sorted(expected_response.keys()) == sorted(response_data.keys()) for key in expected_response: assert (key, expected_response[key]) == (key, response_data[key]) + response = client.get(f"/api/v1/score-sets/{response_data['urn']}") assert response.status_code == 200 @@ -77,27 +103,26 @@ def test_create_score_set_with_contributor(client, setup_router_db): assert response.status_code == 200 response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) assert isinstance(MAVEDB_TMP_URN_RE.fullmatch(response_data["urn"]), re.Match) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) - expected_response.update({"urn": response_data["urn"]}) - expected_response["experiment"].update( - { - "urn": 
experiment["urn"], - "experimentSetUrn": experiment["experimentSetUrn"], - "scoreSetUrns": [response_data["urn"]], - } + + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, response_data ) expected_response["contributors"] = [ { + "recordType": "Contributor", "orcidId": TEST_ORCID_ID, "givenName": "ORCID", "familyName": "User", } ] + assert sorted(expected_response.keys()) == sorted(response_data.keys()) for key in expected_response: assert (key, expected_response[key]) == (key, response_data[key]) + response = client.get(f"/api/v1/score-sets/{response_data['urn']}") assert response.status_code == 200 @@ -106,57 +131,24 @@ def test_create_score_set_with_score_range(client, setup_router_db): experiment = create_experiment(client) score_set = deepcopy(TEST_MINIMAL_SEQ_SCORESET) score_set["experimentUrn"] = experiment["urn"] - score_set.update( - { - "score_ranges": { - "wt_score": 0.5, - "ranges": [ - {"label": "range_1", "range": (-2, 2), "classification": "normal"}, - {"label": "range_2", "range": (2, None), "classification": "abnormal"}, - { - "label": "custom_1", - "range": (None, -2), - "classification": "abnormal", - "description": "A user provided custom range", - }, - ], - } - } - ) + score_set.update({"score_ranges": TEST_SCORESET_RANGE}) response = client.post("/api/v1/score-sets/", json=score_set) assert response.status_code == 200 - response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) assert isinstance(MAVEDB_TMP_URN_RE.fullmatch(response_data["urn"]), re.Match) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) - expected_response.update({"urn": response_data["urn"]}) - expected_response["experiment"].update( - { - "urn": experiment["urn"], - "experimentSetUrn": experiment["experimentSetUrn"], - "scoreSetUrns": [response_data["urn"]], - } + expected_response = update_expected_response_for_created_resources( + 
deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, response_data ) - expected_response["scoreRanges"] = { - "wtScore": 0.5, - "ranges": [ - {"label": "range_1", "range": [-2, 2], "classification": "normal"}, - {"label": "range_2", "range": [2, None], "classification": "abnormal"}, - { - "label": "custom_1", - "range": [None, -2], - "classification": "abnormal", - "description": "A user provided custom range", - }, - ], - } + expected_response["scoreRanges"] = TEST_SAVED_SCORESET_RANGE assert sorted(expected_response.keys()) == sorted(response_data.keys()) for key in expected_response: assert (key, expected_response[key]) == (key, response_data[key]) + response = client.get(f"/api/v1/score-sets/{response_data['urn']}") assert response.status_code == 200 @@ -172,21 +164,233 @@ def test_cannot_create_score_set_without_email(client, setup_router_db): assert response_data["detail"] in "There must be an email address associated with your account to use this feature." -def test_get_own_private_score_set(client, setup_router_db): +def test_cannot_create_score_set_with_invalid_target_gene_category(client, setup_router_db): + experiment = create_experiment(client) + score_set_post_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) + score_set_post_payload["experimentUrn"] = experiment["urn"] + score_set_post_payload["targetGenes"][0]["category"] = "some_invalid_target_category" + response = client.post("/api/v1/score-sets/", json=score_set_post_payload) + assert response.status_code == 422 + response_data = response.json() + assert "value is not a valid enumeration member;" in response_data["detail"][0]["msg"] + + +######################################################################################################################## +# Score set updating +######################################################################################################################## + + +@pytest.mark.parametrize( + "attribute,updated_data,expected_response_data", + [ + ("title", 
"Updated Title", "Updated Title"), + ("method_text", "Updated Method Text", "Updated Method Text"), + ("abstract_text", "Updated Abstract Text", "Updated Abstract Text"), + ("short_description", "Updated Abstract Text", "Updated Abstract Text"), + ("extra_metadata", {"updated": "metadata"}, {"updated": "metadata"}), + ("data_usage_policy", "data_usage_policy", "data_usage_policy"), + ("contributors", [{"orcid_id": EXTRA_USER["username"]}], [SAVED_EXTRA_CONTRIBUTOR]), + ("primary_publication_identifiers", [{"identifier": TEST_PUBMED_IDENTIFIER}], [SAVED_PUBMED_PUBLICATION]), + ("secondary_publication_identifiers", [{"identifier": TEST_PUBMED_IDENTIFIER}], [SAVED_PUBMED_PUBLICATION]), + ("doi_identifiers", [{"identifier": TEST_CROSSREF_IDENTIFIER}], [SAVED_DOI_IDENTIFIER]), + ("license_id", EXTRA_LICENSE["id"], SAVED_SHORT_EXTRA_LICENSE), + ("target_genes", TEST_MINIMAL_ACC_SCORESET["targetGenes"], TEST_MINIMAL_ACC_SCORESET_RESPONSE["targetGenes"]), + ("score_ranges", TEST_SCORESET_RANGE, TEST_SAVED_SCORESET_RANGE), + ], +) +@pytest.mark.parametrize( + "mock_publication_fetch", + [({"dbName": "PubMed", "identifier": f"{TEST_PUBMED_IDENTIFIER}"})], + indirect=["mock_publication_fetch"], +) +def test_can_update_score_set_data_before_publication( + client, setup_router_db, attribute, updated_data, expected_response_data, mock_publication_fetch +): experiment = create_experiment(client) score_set = create_seq_score_set(client, experiment["urn"]) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) - expected_response.update({"urn": score_set["urn"]}) - expected_response["experiment"].update( + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, score_set + ) + + response = client.get(f"/api/v1/score-sets/{score_set['urn']}") + assert response.status_code == 200 + response_data = response.json() + + assert sorted(expected_response.keys()) == sorted(response_data.keys()) + for key in 
expected_response: + assert (key, expected_response[key]) == (key, response_data[key]) + + score_set_update_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) + score_set_update_payload.update({camelize(attribute): updated_data}) + response = client.put(f"/api/v1/score-sets/{score_set['urn']}", json=score_set_update_payload) + assert response.status_code == 200 + + response = client.get(f"/api/v1/score-sets/{score_set['urn']}") + assert response.status_code == 200 + response_data = response.json() + + # Although the client provides the license id, the response includes the full license. + if attribute == "license_id": + attribute = "license" + + assert expected_response_data == response_data[camelize(attribute)] + + +@pytest.mark.parametrize( + "attribute,updated_data,expected_response_data", + [ + ("title", "Updated Title", "Updated Title"), + ("method_text", "Updated Method Text", "Updated Method Text"), + ("abstract_text", "Updated Abstract Text", "Updated Abstract Text"), + ("short_description", "Updated Abstract Text", "Updated Abstract Text"), + ("extra_metadata", {"updated": "metadata"}, {"updated": "metadata"}), + ("data_usage_policy", "data_usage_policy", "data_usage_policy"), + ("contributors", [{"orcid_id": EXTRA_USER["username"]}], [SAVED_EXTRA_CONTRIBUTOR]), + ("primary_publication_identifiers", [{"identifier": TEST_PUBMED_IDENTIFIER}], [SAVED_PUBMED_PUBLICATION]), + ("secondary_publication_identifiers", [{"identifier": TEST_PUBMED_IDENTIFIER}], [SAVED_PUBMED_PUBLICATION]), + ("doi_identifiers", [{"identifier": TEST_CROSSREF_IDENTIFIER}], [SAVED_DOI_IDENTIFIER]), + ("license_id", EXTRA_LICENSE["id"], SAVED_SHORT_EXTRA_LICENSE), + ], +) +@pytest.mark.parametrize( + "mock_publication_fetch", + [({"dbName": "PubMed", "identifier": f"{TEST_PUBMED_IDENTIFIER}"})], + indirect=["mock_publication_fetch"], +) +def test_can_update_score_set_supporting_data_after_publication( + session, + data_provider, + client, + setup_router_db, + attribute, + updated_data, + 
expected_response_data, + mock_publication_fetch, + data_files, +): + experiment = create_experiment(client) + score_set = create_seq_score_set_with_variants( + client, session, data_provider, experiment["urn"], data_files / "scores.csv" + ) + + response = client.post(f"/api/v1/score-sets/{score_set['urn']}/publish") + assert response.status_code == 200 + published_urn = response.json()["urn"] + response = client.get(f"/api/v1/score-sets/{published_urn}") + assert response.status_code == 200 + response_data = response.json() + + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), response_data["experiment"], response_data + ) + expected_response["experiment"].update({"publishedDate": date.today().isoformat()}) + expected_response.update( { - "urn": experiment["urn"], - "experimentSetUrn": experiment["experimentSetUrn"], - "scoreSetUrns": [score_set["urn"]], + "urn": published_urn, + "publishedDate": date.today().isoformat(), + "numVariants": 3, + "private": False, + "datasetColumns": {"countColumns": [], "scoreColumns": ["score"]}, + "processingState": ProcessingState.success.name, } ) + + assert sorted(expected_response.keys()) == sorted(response_data.keys()) + for key in expected_response: + assert (key, expected_response[key]) == (key, response_data[key]) + + score_set_update_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) + score_set_update_payload.update({camelize(attribute): updated_data}) + response = client.put(f"/api/v1/score-sets/{published_urn}", json=score_set_update_payload) + assert response.status_code == 200 + + response = client.get(f"/api/v1/score-sets/{published_urn}") + assert response.status_code == 200 + response_data = response.json() + + # Although the client provides the license id, the response includes the full license. 
+ if attribute == "license_id": + attribute = "license" + + assert expected_response_data == response_data[camelize(attribute)] + + +@pytest.mark.parametrize( + "attribute,updated_data,expected_response_data", + [ + ("target_genes", TEST_MINIMAL_ACC_SCORESET["targetGenes"], TEST_MINIMAL_SEQ_SCORESET_RESPONSE["targetGenes"]), + ( + "score_ranges", + TEST_SCORESET_RANGE, + None, + ), + ], +) +def test_cannot_update_score_set_target_data_after_publication( + client, setup_router_db, attribute, expected_response_data, updated_data, session, data_provider, data_files +): + experiment = create_experiment(client) + score_set = create_seq_score_set_with_variants( + client, session, data_provider, experiment["urn"], data_files / "scores.csv" + ) + + response = client.post(f"/api/v1/score-sets/{score_set['urn']}/publish") + assert response.status_code == 200 + published_urn = response.json()["urn"] + response = client.get(f"/api/v1/score-sets/{published_urn}") + assert response.status_code == 200 + response_data = response.json() + + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), response_data["experiment"], response_data + ) + expected_response["experiment"].update({"publishedDate": date.today().isoformat()}) + expected_response.update( + { + "urn": published_urn, + "publishedDate": date.today().isoformat(), + "numVariants": 3, + "private": False, + "datasetColumns": {"countColumns": [], "scoreColumns": ["score"]}, + "processingState": ProcessingState.success.name, + } + ) + + assert sorted(expected_response.keys()) == sorted(response_data.keys()) + for key in expected_response: + assert (key, expected_response[key]) == (key, response_data[key]) + + score_set_update_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) + score_set_update_payload.update({camelize(attribute): updated_data}) + response = client.put(f"/api/v1/score-sets/{published_urn}", json=score_set_update_payload) + assert response.status_code == 
200 + + response = client.get(f"/api/v1/score-sets/{published_urn}") + assert response.status_code == 200 + response_data = response.json() + + if expected_response_data: + assert expected_response_data == response_data[camelize(attribute)] + else: + assert camelize(attribute) not in response_data.keys() + + +######################################################################################################################## +# Score set fetching +######################################################################################################################## + + +def test_get_own_private_score_set(client, setup_router_db): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, score_set + ) + response = client.get(f"/api/v1/score-sets/{score_set['urn']}") assert response.status_code == 200 response_data = response.json() + assert sorted(expected_response.keys()) == sorted(response_data.keys()) for key in expected_response: assert (key, expected_response[key]) == (key, response_data[key]) @@ -227,35 +431,34 @@ def test_contributor_can_get_other_users_private_score_set(session, client, setu TEST_USER["last_name"], ) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) - expected_response.update({"urn": score_set["urn"]}) - expected_response["experiment"].update( - { - "urn": experiment["urn"], - "experimentSetUrn": experiment["experimentSetUrn"], - "scoreSetUrns": [score_set["urn"]], - } + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, score_set ) expected_response["contributors"] = [ { + "recordType": "Contributor", "orcidId": TEST_USER["username"], "givenName": TEST_USER["first_name"], "familyName": TEST_USER["last_name"], } ] expected_response["createdBy"] = { + "recordType": "User", 
"orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], } expected_response["modifiedBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], } + response = client.get(f"/api/v1/score-sets/{score_set['urn']}") assert response.status_code == 200 response_data = response.json() + assert sorted(expected_response.keys()) == sorted(response_data.keys()) for key in expected_response: assert (key, expected_response[key]) == (key, response_data[key]) @@ -264,15 +467,10 @@ def test_contributor_can_get_other_users_private_score_set(session, client, setu def test_admin_can_get_other_user_private_score_set(session, client, admin_app_overrides, setup_router_db): experiment = create_experiment(client) score_set = create_seq_score_set(client, experiment["urn"]) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) - expected_response.update({"urn": score_set["urn"]}) - expected_response["experiment"].update( - { - "urn": experiment["urn"], - "experimentSetUrn": experiment["experimentSetUrn"], - "scoreSetUrns": [score_set["urn"]], - } + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), experiment, score_set ) + with DependencyOverrider(admin_app_overrides): response = client.get(f"/api/v1/score-sets/{score_set['urn']}") @@ -283,6 +481,11 @@ def test_admin_can_get_other_user_private_score_set(session, client, admin_app_o assert (key, expected_response[key]) == (key, response_data[key]) +######################################################################################################################## +# Adding scores to score set +######################################################################################################################## + + def test_add_score_set_variants_scores_only_endpoint(client, setup_router_db, data_files): experiment = 
create_experiment(client) score_set = create_seq_score_set(client, experiment["urn"]) @@ -336,6 +539,59 @@ def test_add_score_set_variants_scores_and_counts_endpoint(session, client, setu assert score_set == response_data +def test_add_score_set_variants_scores_only_endpoint_utf8_encoded(client, setup_router_db, data_files): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + scores_csv_path = data_files / "scores_utf8_encoded.csv" + with ( + open(scores_csv_path, "rb") as scores_file, + patch.object(ArqRedis, "enqueue_job", return_value=None) as queue, + ): + response = client.post( + f"/api/v1/score-sets/{score_set['urn']}/variants/data", + files={"scores_file": (scores_csv_path.name, scores_file, "text/csv")}, + ) + queue.assert_called_once() + + assert response.status_code == 200 + response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) + + # We test the worker process that actually adds the variant data separately. Here, we take it as + # fact that it would have succeeded. 
+ score_set.update({"processingState": "processing"}) + assert score_set == response_data + + +def test_add_score_set_variants_scores_and_counts_endpoint_utf8_encoded(session, client, setup_router_db, data_files): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + scores_csv_path = data_files / "scores_utf8_encoded.csv" + counts_csv_path = data_files / "counts_utf8_encoded.csv" + with ( + open(scores_csv_path, "rb") as scores_file, + open(counts_csv_path, "rb") as counts_file, + patch.object(ArqRedis, "enqueue_job", return_value=None) as queue, + ): + response = client.post( + f"/api/v1/score-sets/{score_set['urn']}/variants/data", + files={ + "scores_file": (scores_csv_path.name, scores_file, "text/csv"), + "counts_file": (counts_csv_path.name, counts_file, "text/csv"), + }, + ) + queue.assert_called_once() + + assert response.status_code == 200 + response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) + + # We test the worker process that actually adds the variant data separately. Here, we take it as + # fact that it would have succeeded. 
+ score_set.update({"processingState": "processing"}) + assert score_set == response_data + + def test_cannot_add_scores_to_score_set_without_email(session, client, setup_router_db, data_files): experiment = create_experiment(client) score_set = create_seq_score_set(client, experiment["urn"]) @@ -424,17 +680,20 @@ def test_contributor_can_add_scores_to_other_user_score_set(session, client, set score_set.update({"processingState": "processing"}) score_set["contributors"] = [ { + "recordType": "Contributor", "orcidId": TEST_USER["username"], "givenName": TEST_USER["first_name"], "familyName": TEST_USER["last_name"], } ] score_set["createdBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], } score_set["modifiedBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], @@ -480,17 +739,20 @@ def test_contributor_can_add_scores_and_counts_to_other_user_score_set(session, score_set.update({"processingState": "processing"}) score_set["contributors"] = [ { + "recordType": "Contributor", "orcidId": TEST_USER["username"], "givenName": TEST_USER["first_name"], "familyName": TEST_USER["last_name"], } ] score_set["createdBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], } score_set["modifiedBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], @@ -555,6 +817,11 @@ def test_admin_can_add_scores_and_counts_to_other_user_score_set(session, client assert score_set == response_data +######################################################################################################################## +# Score set publication 
+######################################################################################################################## + + def test_publish_score_set(session, data_provider, client, setup_router_db, data_files): experiment = create_experiment(client) score_set = create_seq_score_set_with_variants( @@ -564,10 +831,13 @@ def test_publish_score_set(session, data_provider, client, setup_router_db, data response = client.post(f"/api/v1/score-sets/{score_set['urn']}/publish") assert response.status_code == 200 response_data = response.json() - assert response_data["urn"] == "urn:mavedb:00000001-a-1" - assert response_data["experiment"]["urn"] == "urn:mavedb:00000001-a" + assert isinstance(MAVEDB_SCORE_SET_URN_RE.fullmatch(response_data["urn"]), re.Match) + assert isinstance(MAVEDB_EXPERIMENT_URN_RE.fullmatch(response_data["experiment"]["urn"]), re.Match) - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), response_data["experiment"], response_data + ) + expected_response["experiment"].update({"publishedDate": date.today().isoformat()}) expected_response.update( { "urn": response_data["urn"], @@ -578,14 +848,6 @@ def test_publish_score_set(session, data_provider, client, setup_router_db, data "processingState": ProcessingState.success.name, } ) - expected_response["experiment"].update( - { - "urn": response_data["experiment"]["urn"], - "experimentSetUrn": response_data["experiment"]["experimentSetUrn"], - "scoreSetUrns": [response_data["urn"]], - "publishedDate": date.today().isoformat(), - } - ) assert sorted(expected_response.keys()) == sorted(response_data.keys()) # refresh score set to post worker state @@ -593,6 +855,11 @@ def test_publish_score_set(session, data_provider, client, setup_router_db, data for key in expected_response: assert (key, expected_response[key]) == (key, score_set[key]) + score_set_variants = session.execute( + 
select(VariantDbModel).join(ScoreSetDbModel).where(ScoreSetDbModel.urn == score_set["urn"]) + ).scalars() + assert all([variant.urn.startswith("urn:mavedb:") for variant in score_set_variants]) + def test_publish_multiple_score_sets(session, data_provider, client, setup_router_db, data_files): experiment = create_experiment(client) @@ -625,6 +892,19 @@ def test_publish_multiple_score_sets(session, data_provider, client, setup_route assert pub_score_set_3_data["title"] == score_set_3["title"] assert pub_score_set_3_data["experiment"]["urn"] == "urn:mavedb:00000001-a" + score_set_1_variants = session.execute( + select(VariantDbModel).join(ScoreSetDbModel).where(ScoreSetDbModel.urn == score_set_1["urn"]) + ).scalars() + assert all([variant.urn.startswith("urn:mavedb:") for variant in score_set_1_variants]) + score_set_2_variants = session.execute( + select(VariantDbModel).join(ScoreSetDbModel).where(ScoreSetDbModel.urn == score_set_2["urn"]) + ).scalars() + assert all([variant.urn.startswith("urn:mavedb:") for variant in score_set_2_variants]) + score_set_3_variants = session.execute( + select(VariantDbModel).join(ScoreSetDbModel).where(ScoreSetDbModel.urn == score_set_3["urn"]) + ).scalars() + assert all([variant.urn.startswith("urn:mavedb:") for variant in score_set_3_variants]) + def test_cannot_publish_score_set_without_variants(client, setup_router_db): experiment = create_experiment(client) @@ -684,7 +964,10 @@ def test_contributor_can_publish_other_users_score_set(session, data_provider, c assert response_data["urn"] == "urn:mavedb:00000001-a-1" assert response_data["experiment"]["urn"] == "urn:mavedb:00000001-a" - expected_response = deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE) + expected_response = update_expected_response_for_created_resources( + deepcopy(TEST_MINIMAL_SEQ_SCORESET_RESPONSE), response_data["experiment"], response_data + ) + expected_response["experiment"].update({"publishedDate": date.today().isoformat()}) expected_response.update( { "urn": 
response_data["urn"], @@ -695,27 +978,22 @@ def test_contributor_can_publish_other_users_score_set(session, data_provider, c "processingState": ProcessingState.success.name, } ) - expected_response["experiment"].update( - { - "urn": response_data["experiment"]["urn"], - "experimentSetUrn": response_data["experiment"]["experimentSetUrn"], - "scoreSetUrns": [response_data["urn"]], - "publishedDate": date.today().isoformat(), - } - ) expected_response["contributors"] = [ { + "recordType": "Contributor", "orcidId": TEST_USER["username"], "givenName": TEST_USER["first_name"], "familyName": TEST_USER["last_name"], } ] expected_response["createdBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], } expected_response["modifiedBy"] = { + "recordType": "User", "orcidId": EXTRA_USER["username"], "firstName": EXTRA_USER["first_name"], "lastName": EXTRA_USER["last_name"], @@ -727,6 +1005,11 @@ def test_contributor_can_publish_other_users_score_set(session, data_provider, c for key in expected_response: assert (key, expected_response[key]) == (key, score_set[key]) + score_set_variants = session.execute( + select(VariantDbModel).join(ScoreSetDbModel).where(ScoreSetDbModel.urn == score_set["urn"]) + ).scalars() + assert all([variant.urn.startswith("urn:mavedb:") for variant in score_set_variants]) + def test_admin_cannot_publish_other_user_private_score_set( session, data_provider, client, admin_app_overrides, setup_router_db, data_files @@ -744,6 +1027,11 @@ def test_admin_cannot_publish_other_user_private_score_set( assert f"score set with URN '{score_set['urn']}' not found" in response_data["detail"] +######################################################################################################################## +# Score set meta-analysis +######################################################################################################################## + + def 
test_create_single_score_set_meta_analysis(session, data_provider, client, setup_router_db, data_files): experiment = create_experiment(client) score_set = create_seq_score_set_with_variants( @@ -968,6 +1256,11 @@ def test_multiple_score_set_meta_analysis_multiple_experiment_sets_different_sco assert meta_score_set_3["urn"] == "urn:mavedb:00000003-0-3" +######################################################################################################################## +# Score set search +######################################################################################################################## + + def test_search_score_sets_no_match(session, data_provider, client, setup_router_db, data_files): experiment_1 = create_experiment(client, {"title": "Experiment 1"}) create_seq_score_set_with_variants( @@ -1030,6 +1323,11 @@ def test_search_score_sets_urn_with_space_match(session, data_provider, client, assert response.json()[0]["urn"] == score_set_1_1["urn"] +######################################################################################################################## +# Score set deletion +######################################################################################################################## + + def test_anonymous_cannot_delete_other_users_private_scoreset( session, data_provider, client, setup_router_db, data_files, anonymous_app_overrides ): @@ -1141,6 +1439,11 @@ def test_admin_can_delete_other_users_published_scoreset( assert del_response.status_code == 200 +######################################################################################################################## +# Adding score sets to experiments +######################################################################################################################## + + def test_can_add_score_set_to_own_private_experiment(session, client, setup_router_db): experiment = create_experiment(client) score_set_post_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) 
@@ -1224,3 +1527,33 @@ def test_contributor_can_add_score_set_to_others_public_experiment( score_set_post_payload["experimentUrn"] = published_score_set["experiment"]["urn"] response = client.post("/api/v1/score-sets/", json=score_set_post_payload) assert response.status_code == 200 + + +def test_cannot_create_score_set_with_inactive_license(session, client, setup_router_db): + experiment = create_experiment(client) + score_set_post_payload = deepcopy(TEST_MINIMAL_SEQ_SCORESET) + score_set_post_payload["experimentUrn"] = experiment["urn"] + score_set_post_payload["licenseId"] = TEST_INACTIVE_LICENSE["id"] + response = client.post("/api/v1/score-sets/", json=score_set_post_payload) + assert response.status_code == 400 + + +def test_cannot_modify_score_set_to_inactive_license(session, client, setup_router_db): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + score_set_post_payload = score_set.copy() + score_set_post_payload.update({"licenseId": TEST_INACTIVE_LICENSE["id"], "urn": score_set["urn"]}) + response = client.put(f"/api/v1/score-sets/{score_set['urn']}", json=score_set_post_payload) + assert response.status_code == 400 + + +def test_can_modify_metadata_for_score_set_with_inactive_license(session, client, setup_router_db): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + change_to_inactive_license(session, score_set["urn"]) + score_set_post_payload = score_set.copy() + score_set_post_payload.update({"title": "Update title", "urn": score_set["urn"]}) + response = client.put(f"/api/v1/score-sets/{score_set['urn']}", json=score_set_post_payload) + assert response.status_code == 200 + response_data = response.json() + assert ("title", response_data["title"]) == ("title", "Update title") diff --git a/tests/routers/test_target_gene.py b/tests/routers/test_target_gene.py new file mode 100644 index 00000000..4a607101 --- /dev/null +++ 
b/tests/routers/test_target_gene.py @@ -0,0 +1,113 @@ +from mavedb.models.score_set import ScoreSet as ScoreSetDbModel +from tests.helpers.util import ( + change_ownership, + create_experiment, + create_seq_score_set_with_variants, +) + + +def test_search_my_target_genes_no_match(session, data_provider, client, setup_router_db, data_files): + experiment_1 = create_experiment(client, {"title": "Experiment 1"}) + create_seq_score_set_with_variants( + client, + session, + data_provider, + experiment_1["urn"], + data_files / "scores.csv", + update={"title": "Test Score Set"}, + ) + + search_payload = {"text": "NONEXISTENT"} + response = client.post("/api/v1/me/target-genes/search", json=search_payload) + assert response.status_code == 200 + assert len(response.json()) == 0 + + +def test_search_my_target_genes_no_match_on_other_user(session, data_provider, client, setup_router_db, data_files): + experiment_1 = create_experiment(client, {"title": "Experiment 1"}) + score_set = create_seq_score_set_with_variants( + client, + session, + data_provider, + experiment_1["urn"], + data_files / "scores.csv", + update={"title": "Test Score Set"}, + ) + change_ownership(session, score_set["urn"], ScoreSetDbModel) + + search_payload = {"text": "TEST1"} + response = client.post("/api/v1/me/target-genes/search", json=search_payload) + assert response.status_code == 200 + assert len(response.json()) == 0 + + +def test_search_my_target_genes_match(session, data_provider, client, setup_router_db, data_files): + experiment_1 = create_experiment(client, {"title": "Experiment 1"}) + create_seq_score_set_with_variants( + client, + session, + data_provider, + experiment_1["urn"], + data_files / "scores.csv", + update={"title": "Test Score Set"}, + ) + + search_payload = {"text": "TEST1"} + response = client.post("/api/v1/me/target-genes/search", json=search_payload) + assert response.status_code == 200 + assert len(response.json()) == 1 + assert response.json()[0]["name"] == "TEST1" + + +def 
test_search_target_genes_no_match(session, data_provider, client, setup_router_db, data_files): + experiment_1 = create_experiment(client, {"title": "Experiment 1"}) + create_seq_score_set_with_variants( + client, + session, + data_provider, + experiment_1["urn"], + data_files / "scores.csv", + update={"title": "Test Score Set"}, + ) + + search_payload = {"text": "NONEXISTENT"} + response = client.post("/api/v1/target-genes/search", json=search_payload) + assert response.status_code == 200 + assert len(response.json()) == 0 + + +def test_search_target_genes_match_on_other_user(session, data_provider, client, setup_router_db, data_files): + experiment_1 = create_experiment(client, {"title": "Experiment 1"}) + score_set = create_seq_score_set_with_variants( + client, + session, + data_provider, + experiment_1["urn"], + data_files / "scores.csv", + update={"title": "Test Score Set"}, + ) + change_ownership(session, score_set["urn"], ScoreSetDbModel) + + search_payload = {"text": "TEST1"} + response = client.post("/api/v1/target-genes/search", json=search_payload) + assert response.status_code == 200 + assert len(response.json()) == 1 + assert response.json()[0]["name"] == "TEST1" + + +def test_search_target_genes_match(session, data_provider, client, setup_router_db, data_files): + experiment_1 = create_experiment(client, {"title": "Experiment 1"}) + create_seq_score_set_with_variants( + client, + session, + data_provider, + experiment_1["urn"], + data_files / "scores.csv", + update={"title": "Test Score Set"}, + ) + + search_payload = {"text": "TEST1"} + response = client.post("/api/v1/target-genes/search", json=search_payload) + assert response.status_code == 200 + assert len(response.json()) == 1 + assert response.json()[0]["name"] == "TEST1" diff --git a/tests/validation/test_target.py b/tests/validation/test_target.py index b1920bed..0ac2db12 100644 --- a/tests/validation/test_target.py +++ b/tests/validation/test_target.py @@ -1,22 +1,8 @@ from unittest import 
TestCase -from mavedb.lib.validation.constants.target import valid_categories, valid_sequence_types +from mavedb.lib.validation.constants.target import valid_sequence_types from mavedb.lib.validation.exceptions import ValidationError -from mavedb.lib.validation.target import validate_sequence_category, validate_target_category, validate_target_sequence - - -class TestValidateTargetCategory(TestCase): - def test_valid(self): - for category in valid_categories: - validate_target_category(category) - - def test_invalid_category(self): - with self.assertRaises(ValidationError): - validate_target_category("Protein") - - def test_invalid_case(self): - with self.assertRaises(ValidationError): - validate_target_category("protein coding") +from mavedb.lib.validation.target import validate_sequence_category, validate_target_sequence class TestValidateSequenceCategory(TestCase): diff --git a/tests/view_models/test_target_gene.py b/tests/view_models/test_target_gene.py index 564a27fd..13f8b78a 100644 --- a/tests/view_models/test_target_gene.py +++ b/tests/view_models/test_target_gene.py @@ -5,7 +5,7 @@ def test_create_target_gene_with_sequence(): name = "UBE2I" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 1}] target_sequence = { "sequenceType": "dna", @@ -37,12 +37,12 @@ def test_create_target_gene_with_sequence(): target_sequence=target_sequence, ) assert externalIdentifier.name == "UBE2I" - assert externalIdentifier.category == "Regulatory" + assert externalIdentifier.category == "regulatory" def test_create_target_gene_with_accession(): name = "BRCA1" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 1}] target_accession = {"accession": "NM_001637.3", "assembly": "GRCh37", "gene": "BRCA1"} externalIdentifier = TargetGeneCreate( @@ -52,7 +52,7 @@ def 
test_create_target_gene_with_accession(): target_accession=target_accession, ) assert externalIdentifier.name == "BRCA1" - assert externalIdentifier.category == "Regulatory" + assert externalIdentifier.category == "regulatory" def test_create_invalid_category(): @@ -91,14 +91,14 @@ def test_create_invalid_category(): target_sequence=target_sequence, ) assert ( - "invalid name is not a valid target category. Valid categories are Protein coding, Regulatory, and Other" - " noncoding" in str(exc_info.value) + "value is not a valid enumeration member; permitted: 'protein_coding', 'regulatory', 'other_noncoding'" + in str(exc_info.value) ) def test_create_invalid_sequence_type(): name = "UBE2I" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 0}] taxonomy = { "taxId": 9606, @@ -136,7 +136,7 @@ def test_create_invalid_sequence_type(): def test_create_not_match_sequence_and_type(): name = "UBE2I" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 0}] target_sequence = {"sequenceType": "dna", "sequence": "ARCG"} taxonomy = { @@ -163,7 +163,7 @@ def test_create_not_match_sequence_and_type(): def test_create_invalid_sequence(): name = "UBE2I" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 0}] target_sequence = {"sequenceType": "dna", "sequence": "AOCG%"} taxonomy = { @@ -190,7 +190,7 @@ def test_create_invalid_sequence(): def test_cant_create_target_gene_without_sequence_or_accession(): name = "UBE2I" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 1}] with pytest.raises(ValueError) as exc_info: TargetGeneCreate( @@ -204,7 +204,7 @@ def 
test_cant_create_target_gene_without_sequence_or_accession(): def test_cant_create_target_gene_with_both_sequence_and_accession(): name = "UBE2I" - category = "Regulatory" + category = "regulatory" external_identifiers = [{"identifier": {"dbName": "Ensembl", "identifier": "ENSG00000103275"}, "offset": 1}] target_accession = {"accession": "NM_001637.3", "assembly": "GRCh37", "gene": "BRCA1"} target_sequence = { diff --git a/tests/worker/conftest.py b/tests/worker/conftest.py index 3a2a4cf6..7d989005 100644 --- a/tests/worker/conftest.py +++ b/tests/worker/conftest.py @@ -6,7 +6,7 @@ from mavedb.models.license import License from mavedb.models.taxonomy import Taxonomy from mavedb.models.user import User -from tests.helpers.constants import EXTRA_USER, TEST_LICENSE, TEST_TAXONOMY, TEST_USER +from tests.helpers.constants import EXTRA_USER, TEST_LICENSE, TEST_INACTIVE_LICENSE, TEST_TAXONOMY, TEST_USER from tests.helpers.util import create_experiment, create_seq_score_set @@ -17,6 +17,7 @@ def setup_worker_db(session): db.add(User(**EXTRA_USER)) db.add(Taxonomy(**TEST_TAXONOMY)) db.add(License(**TEST_LICENSE)) + db.add(License(**TEST_INACTIVE_LICENSE)) db.commit()