
Commit 9f0e5d7

Merge pull request #489 from VariantEffect/release-2025.3.0
Release 2025.3.0
2 parents: 1f54107 + 3717967

227 files changed: +16,855 / -3,077 lines


.github/workflows/run-tests-on-push.yml

Lines changed: 25 additions & 45 deletions
@@ -6,61 +6,37 @@ env:
   LOG_CONFIG: test
 
 jobs:
-  run-tests-3_9-core-dependencies:
+  run-mypy-3_11:
     runs-on: ubuntu-latest
-    name: Pytest on Core Dependencies-- Python 3.9
+    name: MyPy checks on Python 3.11
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.9"
-          cache: 'pip'
-      - run: pip install --upgrade pip
-      - run: pip install poetry
-      - run: poetry install --with dev
-      - run: poetry run pytest tests/
-
-  run-tests-3_9:
-    runs-on: ubuntu-latest
-    name: Pytest on Optional Dependencies-- Python 3.9
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.9"
+          python-version: "3.11"
           cache: 'pip'
+      - run: sudo apt-get update
+      - run: sudo apt-get install -y tabix
       - run: pip install --upgrade pip
       - run: pip install poetry
       - run: poetry install --with dev --extras server
-      - run: poetry run pytest tests/ --show-capture=stdout --cov=src
-
-  run-tests-3_10-core-dependencies:
-    runs-on: ubuntu-latest
-    name: Pytest on Core Dependencies-- Python 3.10
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-          cache: 'pip'
-      - run: pip install --upgrade pip
-      - run: pip install poetry
-      - run: poetry install --with dev
-      - run: poetry run pytest tests/
+      - run: poetry run mypy src/
 
-  run-tests-3_10:
+  run-ruff-lint:
     runs-on: ubuntu-latest
-    name: Pytest on Optional Dependencies-- Python 3.10
+    name: Ruff linting on Python 3.11
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.11"
           cache: 'pip'
+      - run: sudo apt-get update
+      - run: sudo apt-get install -y tabix
       - run: pip install --upgrade pip
       - run: pip install poetry
       - run: poetry install --with dev --extras server
-      - run: poetry run pytest tests/ --show-capture=stdout --cov=src
+      - run: poetry run ruff check
 
   run-tests-3_11-core-dependencies:
     runs-on: ubuntu-latest
@@ -85,35 +61,39 @@ jobs:
         with:
           python-version: "3.11"
           cache: 'pip'
+      - run: sudo apt-get update
+      - run: sudo apt-get install -y tabix
       - run: pip install --upgrade pip
       - run: pip install poetry
       - run: poetry install --with dev --extras server
       - run: poetry run pytest tests/ --show-capture=stdout --cov=src
 
-  run-mypy-3_10:
+  run-tests-3_12-core-dependencies:
     runs-on: ubuntu-latest
-    name: MyPy on Full Codebase-- Python 3.10
+    name: Pytest on Core Dependencies-- Python 3.12
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.12"
           cache: 'pip'
       - run: pip install --upgrade pip
       - run: pip install poetry
-      - run: poetry install --with dev --extras server
-      - run: poetry run mypy src/
+      - run: poetry install --with dev
+      - run: poetry run pytest tests/
 
-  run-ruff-lint:
+  run-tests-3_12:
     runs-on: ubuntu-latest
-    name: Ruff on Full Codebase-- Python 3.10
+    name: Pytest on Optional Dependencies-- Python 3.12
     steps:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.12"
           cache: 'pip'
+      - run: sudo apt-get update
+      - run: sudo apt-get install -y tabix
       - run: pip install --upgrade pip
       - run: pip install poetry
       - run: poetry install --with dev --extras server
-      - run: poetry run ruff check
+      - run: poetry run pytest tests/ --show-capture=stdout --cov=src

Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 # python-base
 # Set up shared environment variables
 ################################
-FROM python:3.9 AS python-base
+FROM python:3.11 AS python-base
 
 # Poetry
 # https://python-poetry.org/docs/configuration/#using-environment-variables

README.md

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ Or add `mavedb` to your Python project's dependencies.
 
 ### Prerequisites
 
-- Python 3.9 or later
+- Python 3.11 or later
 - PIP
 - [Poetry](https://python-poetry.org/) for building and publishing distributions. For details on installing poetry, consult its [documentation](https://python-poetry.org/docs/#installation).
 

Lines changed: 125 additions & 0 deletions
"""
To be run as part of revision af87c9953d2d
"""

import sqlalchemy as sa
from sqlalchemy.orm import Session, configure_mappers

from mavedb.models import *
from mavedb.models.score_set import ScoreSet
from mavedb.view_models.score_range import (
    ScoreSetRangesCreate,
    InvestigatorScoreRangesCreate,
    PillarProjectScoreRangesCreate,
    PillarProjectScoreRangeCreate,
)

from mavedb.db.session import SessionLocal

configure_mappers()


evidence_strength_to_label = {
    -8: "BS3_VERY_STRONG",
    -4: "BS3_STRONG",
    -3: "BS3_MODERATE+",
    -2: "BS3_MODERATE",
    -1: "BS3_SUPPORTING",
    1: "PS3_SUPPORTING",
    2: "PS3_MODERATE",
    3: "PS3_MODERATE+",
    4: "PS3_STRONG",
    8: "PS3_VERY_STRONG",
}


def do_migration(db: Session):
    score_sets_with_ranges_or_calibrations = db.scalars(
        sa.select(ScoreSet).where(ScoreSet.score_ranges.isnot(None) | ScoreSet.score_calibrations.isnot(None))
    ).all()

    for score_set in score_sets_with_ranges_or_calibrations:
        if score_set.score_ranges is not None:
            investigator_ranges = InvestigatorScoreRangesCreate(**score_set.score_ranges)
            for score_range in investigator_ranges.ranges:
                score_range.inclusive_lower_bound = False if score_range.range[0] is None else True
                score_range.inclusive_upper_bound = False
        else:
            investigator_ranges = None

        if score_set.score_calibrations is not None:
            thresholds = score_set.score_calibrations.get("pillar_project", {}).get("thresholds", [])
            evidence_strengths = score_set.score_calibrations.get("pillar_project", {}).get("evidence_strengths", [])
            positive_likelihood_ratios = score_set.score_calibrations.get("pillar_project", {}).get("positive_likelihood_ratios", [])
            prior_probability_pathogenicity = score_set.score_calibrations.get("pillar_project", {}).get("prior_probability_pathogenicity", None)
            parameter_sets = score_set.score_calibrations.get("pillar_project", {}).get("parameter_sets", [])

            ranges = []
            # Start negative: the first interior range takes its lower boundary from the preceding threshold.
            boundary_direction = -1
            for idx, vals in enumerate(zip(thresholds, evidence_strengths, positive_likelihood_ratios)):
                threshold, evidence_strength, positive_likelihood_ratio = vals

                if idx == 0:
                    calculated_range = (None, threshold)
                    ranges.append(PillarProjectScoreRangeCreate(
                        range=(None, threshold),
                        classification="normal" if evidence_strength < 0 else "abnormal",
                        label=str(evidence_strength),
                        evidence_strength=evidence_strength,
                        positive_likelihood_ratio=positive_likelihood_ratio,
                        inclusive_lower_bound=False,
                        inclusive_upper_bound=False,
                    ))
                elif idx == len(thresholds) - 1:
                    calculated_range = (threshold, None)
                    ranges.append(PillarProjectScoreRangeCreate(
                        range=(threshold, None),
                        classification="normal" if evidence_strength < 0 else "abnormal",
                        evidence_strength=evidence_strength,
                        label=str(evidence_strength),
                        positive_likelihood_ratio=positive_likelihood_ratio,
                        inclusive_lower_bound=True,
                        inclusive_upper_bound=False,
                    ))
                else:
                    if boundary_direction < 0:
                        calculated_range = (thresholds[idx - 1], threshold)
                    else:
                        calculated_range = (threshold, thresholds[idx + 1])

                    ranges.append(PillarProjectScoreRangeCreate(
                        range=calculated_range,
                        classification="normal" if evidence_strength < 0 else "abnormal",
                        label=str(evidence_strength),
                        evidence_strength=evidence_strength,
                        positive_likelihood_ratio=positive_likelihood_ratio,
                        inclusive_lower_bound=True,
                        inclusive_upper_bound=False,
                    ))

                # Flip boundary_direction if the sign of evidence_strength changes relative to the next entry.
                if idx != len(evidence_strengths) - 1 and (evidence_strengths[idx + 1] * evidence_strength < 0):
                    boundary_direction = -boundary_direction

            pillar_project_ranges = PillarProjectScoreRangesCreate(
                prior_probability_pathogenicity=prior_probability_pathogenicity,
                parameter_sets=parameter_sets,
                ranges=ranges,
            )
        else:
            pillar_project_ranges = None

        score_set.score_ranges = ScoreSetRangesCreate(
            investigator_provided=investigator_ranges if investigator_ranges else None,
            pillar_project=pillar_project_ranges if pillar_project_ranges else None,
        ).model_dump()
        db.add(score_set)


if __name__ == "__main__":
    db = SessionLocal()
    db.current_user = None  # type: ignore

    do_migration(db)

    db.commit()
    db.close()
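
The interval construction in do_migration above is the least obvious part of this script. Below is a minimal, standalone sketch of that logic under illustrative inputs: build_ranges is a hypothetical helper, the thresholds and evidence strengths are made up, and plain dicts stand in for PillarProjectScoreRangeCreate so the sketch runs without MaveDB installed.

# Standalone sketch of the range-building loop in do_migration (illustrative only).
def build_ranges(thresholds, evidence_strengths):
    ranges = []
    boundary_direction = -1  # first interior range takes its lower bound from the preceding threshold
    for idx, (threshold, strength) in enumerate(zip(thresholds, evidence_strengths)):
        if idx == 0:
            calculated_range = (None, threshold)  # open-ended on the left
            inclusive_lower = False
        elif idx == len(thresholds) - 1:
            calculated_range = (threshold, None)  # open-ended on the right
            inclusive_lower = True
        else:
            if boundary_direction < 0:
                calculated_range = (thresholds[idx - 1], threshold)
            else:
                calculated_range = (threshold, thresholds[idx + 1])
            inclusive_lower = True

        ranges.append({
            "range": calculated_range,
            "classification": "normal" if strength < 0 else "abnormal",
            "evidence_strength": strength,
            "inclusive_lower_bound": inclusive_lower,
            "inclusive_upper_bound": False,
        })

        # Flip direction when the evidence strength changes sign between adjacent thresholds.
        if idx != len(evidence_strengths) - 1 and evidence_strengths[idx + 1] * strength < 0:
            boundary_direction = -boundary_direction
    return ranges


if __name__ == "__main__":
    # Hypothetical calibration: three thresholds, strengths crossing from benign (-) to pathogenic (+).
    for r in build_ranges([-1.5, 0.0, 1.5], [-4, 1, 4]):
        print(r)
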
Lines changed: 31 additions & 0 deletions
"""Make faf95_max and ancestry possibly nullable

Revision ID: 2b7a977e7e98
Revises: a8e345cca190
Create Date: 2025-08-21 10:08:58.565416

"""

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "2b7a977e7e98"
down_revision = "a8e345cca190"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column("gnomad_variants", "faf95_max", existing_type=sa.DOUBLE_PRECISION(precision=53), nullable=True)
    op.alter_column("gnomad_variants", "faf95_max_ancestry", existing_type=sa.VARCHAR(), nullable=True)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column("gnomad_variants", "faf95_max_ancestry", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column("gnomad_variants", "faf95_max", existing_type=sa.DOUBLE_PRECISION(precision=53), nullable=False)
    # ### end Alembic commands ###
Lines changed: 29 additions & 0 deletions
"""add uniprot id field for mapped metadata

Revision ID: 4b9fb51a460d
Revises: aa3933cf0cb3
Create Date: 2025-07-24 16:07:51.548202

"""

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "4b9fb51a460d"
down_revision = "aa3933cf0cb3"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("target_genes", sa.Column("uniprot_id_from_mapped_metadata", sa.String(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("target_genes", "uniprot_id_from_mapped_metadata")
    # ### end Alembic commands ###
Lines changed: 45 additions & 0 deletions
"""add gnomad variants table

Revision ID: 79b28316dd0c
Revises: 4b9fb51a460d
Create Date: 2025-06-17 08:38:51.954238

"""

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "79b28316dd0c"
down_revision = "4b9fb51a460d"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "gnomad_variants",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("db_name", sa.String(), nullable=False),
        sa.Column("db_identifier", sa.String(), nullable=False),
        sa.Column("db_version", sa.String(), nullable=False),
        sa.Column("allele_count", sa.Integer(), nullable=False),
        sa.Column("allele_number", sa.Integer(), nullable=False),
        sa.Column("allele_frequency", sa.Float(), nullable=False),
        sa.Column("faf95_max", sa.Float(), nullable=False),
        sa.Column("faf95_max_ancestry", sa.String(), nullable=False),
        sa.Column("creation_date", sa.Date(), nullable=False),
        sa.Column("modification_date", sa.Date(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_gnomad_variants_db_identifier"), "gnomad_variants", ["db_identifier"], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_gnomad_variants_db_identifier"), table_name="gnomad_variants")
    op.drop_table("gnomad_variants")
    # ### end Alembic commands ###
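
For orientation only, here is a hypothetical SQLAlchemy 2.0-style model matching the table created above. The real model lives in mavedb.models and may differ; GnomADVariant and Base are assumed names, and faf95_max / faf95_max_ancestry are shown as nullable to reflect the follow-up revision 2b7a977e7e98.

# Hypothetical ORM sketch of the gnomad_variants table; not the actual MaveDB model.
from datetime import date

from sqlalchemy import Date, Float, Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class GnomADVariant(Base):
    __tablename__ = "gnomad_variants"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    db_name: Mapped[str] = mapped_column(String, nullable=False)
    db_identifier: Mapped[str] = mapped_column(String, nullable=False, index=True)
    db_version: Mapped[str] = mapped_column(String, nullable=False)
    allele_count: Mapped[int] = mapped_column(Integer, nullable=False)
    allele_number: Mapped[int] = mapped_column(Integer, nullable=False)
    allele_frequency: Mapped[float] = mapped_column(Float, nullable=False)
    faf95_max: Mapped[float | None] = mapped_column(Float, nullable=True)
    faf95_max_ancestry: Mapped[str | None] = mapped_column(String, nullable=True)
    creation_date: Mapped[date] = mapped_column(Date, nullable=False)
    modification_date: Mapped[date] = mapped_column(Date, nullable=False)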
