
Commit c740d6b

Merge branch 'release-2024.4.2' into estelle/excludeMetaAnalysisExperiment

2 parents: 66ff1d9 + f772db4


45 files changed: +1426 -315 lines changed

alembic/versions/2b6f40ea2fb6_add_score_range_column.py

Lines changed: 1 addition & 1 deletion

@@ -1,7 +1,7 @@
 """Add score range column
 
 Revision ID: 2b6f40ea2fb6
-Revises: 1d4933b4b6f7
+Revises: 1cee01c42909
 Create Date: 2024-09-09 12:25:33.180077
 
 """

alembic/versions/68a0ec57694e_add_active_column_to_licenses.py

Lines changed: 29 additions & 0 deletions

@@ -0,0 +1,29 @@
+"""Add active column to licenses
+
+Revision ID: 68a0ec57694e
+Revises: 03c7124c33e1
+Create Date: 2024-10-22 15:36:41.868909
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "68a0ec57694e"
+down_revision = "03c7124c33e1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("licenses", sa.Column("active", sa.Boolean(), nullable=False, server_default=sa.true()))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("licenses", "active")
+    # ### end Alembic commands ###
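
The server_default=sa.true() keeps the new NOT NULL constraint satisfiable for rows that already exist, so the upgrade can run against a populated licenses table; every existing license starts out active. A follow-up data migration could then retire individual licenses. A minimal sketch, not part of this commit, with a placeholder short name used only for illustration:

# Hypothetical follow-up data migration; illustrative only.
from alembic import op
import sqlalchemy as sa

licenses = sa.table(
    "licenses",
    sa.column("short_name", sa.String),
    sa.column("active", sa.Boolean),
)


def upgrade():
    # Existing rows were backfilled to TRUE by server_default; flip one license off.
    op.execute(
        licenses.update().where(licenses.c.short_name == "Some retired license").values(active=False)
    )


def downgrade():
    op.execute(
        licenses.update().where(licenses.c.short_name == "Some retired license").values(active=True)
    )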

pyproject.toml

Lines changed: 2 additions & 1 deletion

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "mavedb"
-version = "2024.4.1"
+version = "2024.4.2"
 description = "API for MaveDB, the database of Multiplexed Assays of Variant Effect."
 license = "AGPL-3.0-only"
 readme = "README.md"

@@ -99,6 +99,7 @@ mypy_path = "mypy_stubs"
 addopts = "-v -rP --import-mode=importlib --disable-socket --allow-hosts localhost,::1,127.0.0.1"
 asyncio_mode = 'strict'
 testpaths = "tests/"
+pythonpath = "."
 norecursedirs = "tests/helpers/"
 # Uncomment the following lines to include application log output in Pytest logs.
 # log_cli = true
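
The new pythonpath entry points pytest at the repository root, so test modules can import repo-relative helper packages without installing them. A minimal sketch of what that enables; the tests.helpers.constants module and TEST_USER name are assumptions for illustration, not files in this diff:

# tests/test_example.py -- illustrative only.
# With pythonpath = "." in [tool.pytest.ini_options], "tests" resolves as a
# package rooted at the repository directory.
from tests.helpers.constants import TEST_USER


def test_user_constant_has_a_username():
    assert "username" in TEST_USER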

src/mavedb/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -6,6 +6,6 @@
 logger = module_logging.getLogger(__name__)
 
 __project__ = "mavedb-api"
-__version__ = "2024.4.1"
+__version__ = "2024.4.2"
 
 logger.info(f"MaveDB {__version__}")

src/mavedb/lib/script_environment.py

Lines changed: 0 additions & 25 deletions
This file was deleted.

src/mavedb/lib/target_genes.py

Lines changed: 56 additions & 0 deletions

@@ -0,0 +1,56 @@
+import logging
+from typing import Optional
+
+from sqlalchemy import func, or_
+from sqlalchemy.orm import Session
+
+from mavedb.lib.logging.context import logging_context, save_to_logging_context
+from mavedb.models.contributor import Contributor
+from mavedb.models.score_set import ScoreSet
+from mavedb.models.target_gene import TargetGene
+from mavedb.models.user import User
+from mavedb.view_models.search import TextSearch
+
+logger = logging.getLogger(__name__)
+
+
+def search_target_genes(
+    db: Session,
+    owner_or_contributor: Optional[User],
+    search: TextSearch,
+    limit: Optional[int],
+) -> list[TargetGene]:
+    save_to_logging_context({"target_gene_search_criteria": search.dict()})
+
+    query = db.query(TargetGene)
+
+    if search.text and len(search.text.strip()) > 0:
+        lower_search_text = search.text.strip().lower()
+        query = query.filter(func.lower(TargetGene.name).contains(lower_search_text))
+    if owner_or_contributor is not None:
+        query = query.filter(
+            TargetGene.score_set.has(
+                or_(
+                    ScoreSet.created_by_id == owner_or_contributor.id,
+                    ScoreSet.contributors.any(
+                        Contributor.orcid_id == owner_or_contributor.username
+                    ),
+                )
+            )
+        )
+
+    query = query.order_by(TargetGene.name)
+    if limit is not None:
+        query = query.limit(limit)
+
+    target_genes = query.all()
+    if not target_genes:
+        target_genes = []
+
+    save_to_logging_context({"matching_resources": len(target_genes)})
+    logger.debug(
+        msg=f"Target gene search yielded {len(target_genes)} matching resources.",
+        extra=logging_context(),
+    )
+
+    return target_genes
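
A hedged sketch of how the new helper might be called from a FastAPI route. The router prefix, the deps import path, and the anonymous-search policy shown here are assumptions, not code from this commit:

# Illustrative caller only; not part of this commit.
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from mavedb import deps  # import path assumed
from mavedb.lib.target_genes import search_target_genes
from mavedb.view_models.search import TextSearch

router = APIRouter(prefix="/api/v1", tags=["target genes"])  # prefix assumed


@router.post("/target-genes/search")
def search(search: TextSearch, db: Session = Depends(deps.get_db)) -> list[str]:
    # Anonymous search: no owner/contributor filter; cap the result list at 50 genes
    # and return only the matching gene names so the response is trivially serializable.
    return [tg.name for tg in search_target_genes(db, owner_or_contributor=None, search=search, limit=50)]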

src/mavedb/models/license.py

Lines changed: 2 additions & 1 deletion

@@ -1,6 +1,6 @@
 from datetime import date
 
-from sqlalchemy import Column, Date, Integer, String
+from sqlalchemy import Boolean, Column, Date, Integer, String
 
 from mavedb.db.base import Base
 

@@ -16,3 +16,4 @@ class License(Base):
     version = Column(String, nullable=True, unique=False)
     creation_date = Column(Date, nullable=False, default=date.today)
     modification_date = Column(Date, nullable=False, default=date.today, onupdate=date.today)
+    active = Column(Boolean, nullable=False)

src/mavedb/models/mapped_variant.py

Lines changed: 2 additions & 2 deletions

@@ -14,8 +14,8 @@ class MappedVariant(Base):
 
     id = Column(Integer, primary_key=True)
 
-    pre_mapped = Column(JSONB, nullable=True)
-    post_mapped = Column(JSONB, nullable=True)
+    pre_mapped = Column(JSONB(none_as_null=True), nullable=True)
+    post_mapped = Column(JSONB(none_as_null=True), nullable=True)
     vrs_version = Column(String, nullable=True)
     error_message = Column(String, nullable=True)
     modification_date = Column(Date, nullable=False, default=date.today, onupdate=date.today)
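
With the stock JSONB type, assigning Python None to one of these attributes persists the JSON value null rather than a SQL NULL, which an IS NULL filter does not match. Passing none_as_null=True makes None round-trip as SQL NULL, so nullable=True and .is_(None) queries behave as expected. A minimal sketch, assuming an already-configured Session named db and at least one existing row:

# Sketch of the behavioral difference; `db` is an assumed, configured Session.
from mavedb.models.mapped_variant import MappedVariant

mv = db.query(MappedVariant).first()
mv.post_mapped = None
db.commit()

# With JSONB(none_as_null=True) the column now holds SQL NULL, so this filter matches:
unmapped = db.query(MappedVariant).filter(MappedVariant.post_mapped.is_(None)).all()
# With plain JSONB, the same assignment would have stored the JSON value `null`,
# which an IS NULL filter does not match.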

src/mavedb/routers/licenses.py

Lines changed: 13 additions & 0 deletions

@@ -23,6 +23,19 @@ def list_licenses(
     return items
 
 
+@router.get("/active", status_code=200, response_model=List[license.ShortLicense], responses={404: {}})
+def list_active_licenses(
+    *,
+    db: Session = Depends(deps.get_db),
+) -> Any:
+    """
+    List active licenses.
+    """
+
+    items = db.query(License).where(License.active.is_(True)).order_by(License.short_name).all()
+    return items
+
+
 @router.get("/{item_id}", status_code=200, response_model=license.License, responses={404: {}})
 def fetch_license(
     *,
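
Because FastAPI matches routes in registration order, declaring /active ahead of /{item_id} keeps the literal path segment from being captured as an item_id. A hedged client-side sketch; the host and the camelCase field name are assumptions, not verified by this diff:

# Illustrative request against the new endpoint.
import requests

response = requests.get("https://api.mavedb.org/api/v1/licenses/active")
response.raise_for_status()
for short_license in response.json():
    print(short_license.get("shortName"))  # serialized ShortLicense view model, field name assumed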

src/mavedb/routers/score_sets.py

Lines changed: 69 additions & 84 deletions

@@ -348,6 +348,11 @@ async def create_score_set(
     if not license_:
         logger.info(msg="Failed to create score set; The requested license does not exist.", extra=logging_context())
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown license")
+    elif not license_.active:
+        logger.info(
+            msg="Failed to create score set; The requested license is no longer active.", extra=logging_context()
+        )
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid license")
 
     save_to_logging_context({"requested_superseded_score_set": item_create.superseded_score_set_urn})
     if item_create.superseded_score_set_urn is not None:
@@ -651,7 +656,9 @@ async def upload_score_set_variant_data(
     return item
 
 
-@router.put("/score-sets/{urn}", response_model=score_set.ScoreSet, responses={422: {}})
+@router.put(
+    "/score-sets/{urn}", response_model=score_set.ScoreSet, responses={422: {}}, response_model_exclude_none=True
+)
 async def update_score_set(
     *,
     urn: str,
@@ -673,68 +680,73 @@ async def update_score_set(
 
     assert_permission(user_data, item, Action.UPDATE)
 
-    # Editing unpublished score set
-    if item.private is True:
-        license_ = None
-
-        if item_update.license_id is not None:
-            save_to_logging_context({"license": item_update.license_id})
-            license_ = db.query(License).filter(License.id == item_update.license_id).one_or_none()
+    for var, value in vars(item_update).items():
+        if var not in [
+            "contributors",
+            "score_ranges",
+            "doi_identifiers",
+            "experiment_urn",
+            "license_id",
+            "secondary_publication_identifiers",
+            "primary_publication_identifiers",
+            "target_genes",
+        ]:
+            setattr(item, var, value) if value else None
+
+    if item_update.license_id is not None:
+        save_to_logging_context({"license": item_update.license_id})
+        license_ = db.query(License).filter(License.id == item_update.license_id).one_or_none()
+
+        if not license_:
+            logger.info(
+                msg="Failed to update score set; The requested license does not exist.", extra=logging_context()
+            )
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown license")
 
-            if not license_:
-                logger.info(
-                    msg="Failed to update score set; The requested license does not exist.", extra=logging_context()
-                )
-                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown license")
+        # Allow in-active licenses to be retained on update if they already exist on the item.
+        elif not license_.active and item.licence_id != item_update.license_id:
+            logger.info(
+                msg="Failed to update score set license; The requested license is no longer active.",
+                extra=logging_context(),
+            )
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid license")
 
-            item.license = license_
+        item.license = license_
 
-        for var, value in vars(item_update).items():
-            if var not in [
-                "contributors",
-                "score_ranges",
-                "doi_identifiers",
-                "experiment_urn",
-                "license_id",
-                "secondary_publication_identifiers",
-                "primary_publication_identifiers",
-                "target_genes",
-            ]:
-                setattr(item, var, value) if value else None
-
-        try:
-            item.contributors = [
-                await find_or_create_contributor(db, contributor.orcid_id)
-                for contributor in item_update.contributors or []
-            ]
-        except NonexistentOrcidUserError as e:
-            logger.error(msg="Could not find ORCID user with the provided user ID.", extra=logging_context())
-            raise pydantic.ValidationError(
-                [pydantic.error_wrappers.ErrorWrapper(ValidationError(str(e)), loc="contributors")],
-                model=score_set.ScoreSetUpdate,
-            )
+    item.doi_identifiers = [
+        await find_or_create_doi_identifier(db, identifier.identifier)
+        for identifier in item_update.doi_identifiers or []
+    ]
+    primary_publication_identifiers = [
+        await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name)
+        for identifier in item_update.primary_publication_identifiers or []
+    ]
+    publication_identifiers = [
+        await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name)
+        for identifier in item_update.secondary_publication_identifiers or []
+    ] + primary_publication_identifiers
 
-        item.doi_identifiers = [
-            await find_or_create_doi_identifier(db, identifier.identifier)
-            for identifier in item_update.doi_identifiers or []
-        ]
-        primary_publication_identifiers = [
-            await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name)
-            for identifier in item_update.primary_publication_identifiers or []
-        ]
-        publication_identifiers = [
-            await find_or_create_publication_identifier(db, identifier.identifier, identifier.db_name)
-            for identifier in item_update.secondary_publication_identifiers or []
-        ] + primary_publication_identifiers
+    # create a temporary `primary` attribute on each of our publications that indicates
+    # to our association proxy whether it is a primary publication or not
+    primary_identifiers = [p.identifier for p in primary_publication_identifiers]
+    for publication in publication_identifiers:
+        setattr(publication, "primary", publication.identifier in primary_identifiers)
 
-        # create a temporary `primary` attribute on each of our publications that indicates
-        # to our association proxy whether it is a primary publication or not
-        primary_identifiers = [pub.identifier for pub in primary_publication_identifiers]
-        for publication in publication_identifiers:
-            setattr(publication, "primary", publication.identifier in primary_identifiers)
+    item.publication_identifiers = publication_identifiers
 
-        item.publication_identifiers = publication_identifiers
+    try:
+        item.contributors = [
+            await find_or_create_contributor(db, contributor.orcid_id) for contributor in item_update.contributors or []
+        ]
+    except NonexistentOrcidUserError as e:
+        logger.error(msg="Could not find ORCID user with the provided user ID.", extra=logging_context())
+        raise pydantic.ValidationError(
+            [pydantic.error_wrappers.ErrorWrapper(ValidationError(str(e)), loc="contributors")],
+            model=score_set.ScoreSetUpdate,
+        )
 
+    # Score set has not been published and attributes affecting scores may still be edited.
+    if item.private:
         if item_update.score_ranges:
             item.score_ranges = item_update.score_ranges.dict()
         else:
@@ -889,35 +901,8 @@ async def update_score_set(
         if job is not None:
             save_to_logging_context({"worker_job_id": job.job_id})
             logger.info(msg="Enqueud variant creation job.", extra=logging_context())
-
-        for var, value in vars(item_update).items():
-            if var not in [
-                "score_ranges",
-                "contributors",
-                "doi_identifiers",
-                "experiment_urn",
-                "primary_publication_identifiers",
-                "secondary_publication_identifiers",
-                "target_genes",
-            ]:
-                setattr(item, var, value) if value else None
-
-    # Editing published score set
     else:
-        for var, value in vars(item_update).items():
-            if var in ["title", "method_text", "abstract_text", "short_description"]:
-                setattr(item, var, value) if value else None
-        try:
-            item.contributors = [
-                await find_or_create_contributor(db, contributor.orcid_id)
-                for contributor in item_update.contributors or []
-            ]
-        except NonexistentOrcidUserError as e:
-            logger.error(msg="Could not find ORCID user with the provided user ID.", extra=logging_context())
-            raise pydantic.ValidationError(
-                [pydantic.error_wrappers.ErrorWrapper(ValidationError(str(e)), loc="contributors")],
-                model=score_set.ScoreSetUpdate,
-            )
+        logger.debug(msg="Skipped score range and target gene update. Score set is published.", extra=logging_context())
 
     db.add(item)
     db.commit()
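
A hedged test sketch of the new gating in create_score_set; the TestClient fixture, payload helper, inactive-license fixture, and route prefix are assumptions, not fixtures introduced by this commit:

# Illustrative pytest sketch only.
def test_create_score_set_rejects_inactive_license(client, score_set_payload, inactive_license):
    score_set_payload["licenseId"] = inactive_license.id

    response = client.post("/api/v1/score-sets/", json=score_set_payload)

    # create_score_set now returns 400 "Invalid license" when license_.active is False,
    # while an unknown license id still yields 400 "Unknown license".
    assert response.status_code == 400
    assert response.json()["detail"] == "Invalid license"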
