Skip to content

Commit 32fc2b0

Browse files
authored
Merge pull request #1 from DiamondLightSource/feature/lims-1669/parent-child
[LIMS-1669] Allow samples to have ancestors/descendants
1 parent a4f73b9 commit 32fc2b0

File tree

12 files changed

+366
-42
lines changed

12 files changed

+366
-42
lines changed

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ WORKDIR /project
1919

2020
# make the wheel outside of the venv so 'build' does not dirty requirements.txt
2121
RUN pip install --upgrade pip build && \
22-
#export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \
22+
export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \
2323
python -m build && \
2424
touch requirements.txt
2525

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
"""Add sample parent/child relationship
2+
3+
Revision ID: 297144dfe234
4+
Revises: 7325165750bc
5+
Create Date: 2025-03-13 11:02:07.757527
6+
7+
"""
8+
from typing import Sequence, Union
9+
10+
from alembic import op
11+
import sqlalchemy as sa
12+
13+
14+
# revision identifiers, used by Alembic.
15+
revision: str = '297144dfe234'
16+
down_revision: Union[str, None] = '7325165750bc'
17+
branch_labels: Union[str, Sequence[str], None] = None
18+
depends_on: Union[str, Sequence[str], None] = None
19+
20+
21+
def upgrade() -> None:
    """Apply the migration: create the ``SampleParentChild`` association table.

    The table links parent samples to the child samples derived from them,
    with a composite primary key and an index on each side of the
    relationship.
    """
    op.create_table(
        'SampleParentChild',
        sa.Column(
            'parentId',
            sa.Integer(),
            nullable=False,
            comment='Sample(s) from which the child(ren) was derived from',
        ),
        sa.Column(
            'childId',
            sa.Integer(),
            nullable=False,
            comment='Sample(s) derived from parent(s)',
        ),
        sa.Column(
            'creationDate',
            sa.DateTime(timezone=True),
            server_default=sa.text('now()'),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(['childId'], ['Sample.sampleId']),
        sa.ForeignKeyConstraint(['parentId'], ['Sample.sampleId']),
        sa.PrimaryKeyConstraint('parentId', 'childId', name='parent_child_pk'),
    )
    # Index both ends of the relationship so lookups are fast in either
    # direction (child index first, matching the generated revision).
    for column in ('childId', 'parentId'):
        op.create_index(
            op.f(f'ix_SampleParentChild_{column}'),
            'SampleParentChild',
            [column],
            unique=False,
        )
34+
35+
36+
def downgrade() -> None:
    """Revert the migration: drop ``SampleParentChild`` and its indices."""
    # Indices are removed first (parent index, then child index, mirroring
    # the reverse of upgrade), then the table itself.
    for column in ('parentId', 'childId'):
        op.drop_index(
            op.f(f'ix_SampleParentChild_{column}'),
            table_name='SampleParentChild',
        )
    op.drop_table('SampleParentChild')

database/data.sql

Lines changed: 84 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -192,6 +192,33 @@ COMMENT ON COLUMN public."Sample"."externalId" IS 'Item ID in ISPyB';
192192
COMMENT ON COLUMN public."Sample"."subLocation" IS 'Additional location, such as cassette slot or multi-sample pin position';
193193

194194

195+
--
196+
-- Name: SampleParentChild; Type: TABLE; Schema: public; Owner: sample_handling
197+
--
198+
199+
CREATE TABLE public."SampleParentChild" (
200+
"parentId" integer NOT NULL,
201+
"childId" integer NOT NULL,
202+
"creationDate" timestamp with time zone DEFAULT now() NOT NULL
203+
);
204+
205+
206+
ALTER TABLE public."SampleParentChild" OWNER TO sample_handling;
207+
208+
--
209+
-- Name: COLUMN "SampleParentChild"."parentId"; Type: COMMENT; Schema: public; Owner: sample_handling
210+
--
211+
212+
COMMENT ON COLUMN public."SampleParentChild"."parentId" IS 'Sample(s) from which the child(ren) was derived from';
213+
214+
215+
--
216+
-- Name: COLUMN "SampleParentChild"."childId"; Type: COMMENT; Schema: public; Owner: sample_handling
217+
--
218+
219+
COMMENT ON COLUMN public."SampleParentChild"."childId" IS 'Sample(s) derived from parent(s)';
220+
221+
195222
--
196223
-- Name: Sample_sampleId_seq; Type: SEQUENCE; Schema: public; Owner: sample_handling
197224
--
@@ -389,6 +416,8 @@ COPY public."Container" ("containerId", "shipmentId", "topLevelContainerId", "pa
389416
1307 204 \N 1336 gridBox 4 2 \N f \N Grid_Box_01 \N \N f f \N 2025-01-10 08:54:42.073855+00
390417
1335 204 \N 1336 gridBox 4 3 \N f \N Grid_Box_02 \N \N f f \N 2025-01-10 08:54:42.073855+00
391418
648 97 \N 646 gridBox 4 1 \N f \N Grid_Box_02 \N \N f f \N 2025-01-10 08:54:42.073855+00
419+
1904 229 \N 1901 gridBox 4 1 \N f \N Grid_Box_01 \N \N f f \N 2025-01-10 08:54:42.073855+00
420+
1901 229 720 \N puck 4 \N \N f \N Puck_01 \N \N f f \N 2025-01-10 08:54:42.073855+00
392421
\.
393422

394423

@@ -415,6 +444,16 @@ COPY public."Sample" ("sampleId", "shipmentId", "proteinId", type, location, det
415444
612 117 338108 grid \N {"buffer": "", "concentration": "", "foil": "Quantifoil copper", "film": "Holey carbon", "mesh": "200", "hole": "R 0.6/1", "vitrification": "GP2", "vitrificationConditions": ""} 788 3P_1 \N \N 2 2025-01-10 08:54:42.073855+00
416445
3 1 4407 sample 1 {"details": null, "shipmentId": 1, "foil": "Quantifoil copper", "film": "Holey carbon", "mesh": "200", "hole": "R 0.6/1", "vitrification": "GP2"} 4 Sample_02 6186947 \N 1 2025-01-10 08:54:42.073855+00
417446
561 117 338108 grid 1 {"buffer": "", "concentration": "", "foil": "Quantifoil copper", "film": "Holey carbon", "mesh": "200", "hole": "R 0.6/1", "vitrification": "GP2", "vitrificationConditions": ""} 776 3P_1 6212665 \N 1 2025-01-10 08:54:42.073855+00
447+
1877 229 338108 grid 1 {"buffer": "", "concentration": "", "foil": "Quantifoil copper", "film": "Holey carbon", "mesh": "200", "hole": "R 0.6/1", "vitrification": "GP2", "vitrificationConditions": ""} 1904 3P_1 \N \N 1 2025-01-10 08:54:42.073855+00
448+
\.
449+
450+
451+
--
452+
-- Data for Name: SampleParentChild; Type: TABLE DATA; Schema: public; Owner: sample_handling
453+
--
454+
455+
COPY public."SampleParentChild" ("parentId", "childId", "creationDate") FROM stdin;
456+
612 1877 2025-03-13 09:49:12.797986+00
418457
\.
419458

420459

@@ -449,6 +488,7 @@ COPY public."TopLevelContainer" ("topLevelContainerId", "shipmentId", details, c
449488
171 106 \N DLS-4 dewar Dewar_06 20 \N f 1100af88-2e0b-46a7-93f9-2737a0b23d0c 2025-01-10 08:54:42.073855+00
450489
199 117 {} DLS-BI-0020 dewar DLS-BI-0020 72181 f 1100af88-2e0b-46a7-93f9-2737a0b23d0c 2025-01-10 08:54:42.073855+00
451490
221 \N {} DLS-BI-0020 dewar DLS-BI-0020 \N t 1100af88-2e0b-46a7-93f9-2737a0b23d0c 2025-01-10 08:54:42.073855+00
491+
720 229 {} DLS-BI-0020 dewar DLS-BI-0020 \N f 1100af88-2e0b-46a7-93f9-2737a0b23d0c 2025-01-10 08:54:42.073855+00
452492
\.
453493

454494

@@ -457,43 +497,43 @@ COPY public."TopLevelContainer" ("topLevelContainerId", "shipmentId", details, c
457497
--
458498

459499
COPY public.alembic_version (version_num) FROM stdin;
460-
7325165750bc
500+
297144dfe234
461501
\.
462502

463503

464504
--
465505
-- Name: Container_containerId_seq; Type: SEQUENCE SET; Schema: public; Owner: sample_handling
466506
--
467507

468-
SELECT pg_catalog.setval('public."Container_containerId_seq"', 1898, true);
508+
SELECT pg_catalog.setval('public."Container_containerId_seq"', 2039, true);
469509

470510

471511
--
472512
-- Name: PreSession_preSessionId_seq; Type: SEQUENCE SET; Schema: public; Owner: sample_handling
473513
--
474514

475-
SELECT pg_catalog.setval('public."PreSession_preSessionId_seq"', 349, true);
515+
SELECT pg_catalog.setval('public."PreSession_preSessionId_seq"', 379, true);
476516

477517

478518
--
479519
-- Name: Sample_sampleId_seq; Type: SEQUENCE SET; Schema: public; Owner: sample_handling
480520
--
481521

482-
SELECT pg_catalog.setval('public."Sample_sampleId_seq"', 1876, true);
522+
SELECT pg_catalog.setval('public."Sample_sampleId_seq"', 2096, true);
483523

484524

485525
--
486526
-- Name: Shipment_shipmentId_seq; Type: SEQUENCE SET; Schema: public; Owner: sample_handling
487527
--
488528

489-
SELECT pg_catalog.setval('public."Shipment_shipmentId_seq"', 271, true);
529+
SELECT pg_catalog.setval('public."Shipment_shipmentId_seq"', 286, true);
490530

491531

492532
--
493533
-- Name: TopLevelContainer_topLevelContainerId_seq; Type: SEQUENCE SET; Schema: public; Owner: sample_handling
494534
--
495535

496-
SELECT pg_catalog.setval('public."TopLevelContainer_topLevelContainerId_seq"', 719, true);
536+
SELECT pg_catalog.setval('public."TopLevelContainer_topLevelContainerId_seq"', 825, true);
497537

498538

499539
--
@@ -616,6 +656,14 @@ ALTER TABLE ONLY public.alembic_version
616656
ADD CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num);
617657

618658

659+
--
660+
-- Name: SampleParentChild parent_child_pk; Type: CONSTRAINT; Schema: public; Owner: sample_handling
661+
--
662+
663+
ALTER TABLE ONLY public."SampleParentChild"
664+
ADD CONSTRAINT parent_child_pk PRIMARY KEY ("parentId", "childId");
665+
666+
619667
--
620668
-- Name: ix_Container_containerId; Type: INDEX; Schema: public; Owner: sample_handling
621669
--
@@ -651,6 +699,20 @@ CREATE INDEX "ix_PreSession_preSessionId" ON public."PreSession" USING btree ("p
651699
CREATE UNIQUE INDEX "ix_PreSession_shipmentId" ON public."PreSession" USING btree ("shipmentId");
652700

653701

702+
--
703+
-- Name: ix_SampleParentChild_childId; Type: INDEX; Schema: public; Owner: sample_handling
704+
--
705+
706+
CREATE INDEX "ix_SampleParentChild_childId" ON public."SampleParentChild" USING btree ("childId");
707+
708+
709+
--
710+
-- Name: ix_SampleParentChild_parentId; Type: INDEX; Schema: public; Owner: sample_handling
711+
--
712+
713+
CREATE INDEX "ix_SampleParentChild_parentId" ON public."SampleParentChild" USING btree ("parentId");
714+
715+
654716
--
655717
-- Name: ix_Sample_containerId; Type: INDEX; Schema: public; Owner: sample_handling
656718
--
@@ -746,6 +808,22 @@ ALTER TABLE ONLY public."PreSession"
746808
ADD CONSTRAINT "PreSession_shipmentId_fkey" FOREIGN KEY ("shipmentId") REFERENCES public."Shipment"("shipmentId");
747809

748810

811+
--
812+
-- Name: SampleParentChild SampleParentChild_childId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: sample_handling
813+
--
814+
815+
ALTER TABLE ONLY public."SampleParentChild"
816+
ADD CONSTRAINT "SampleParentChild_childId_fkey" FOREIGN KEY ("childId") REFERENCES public."Sample"("sampleId");
817+
818+
819+
--
820+
-- Name: SampleParentChild SampleParentChild_parentId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: sample_handling
821+
--
822+
823+
ALTER TABLE ONLY public."SampleParentChild"
824+
ADD CONSTRAINT "SampleParentChild_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES public."Sample"("sampleId");
825+
826+
749827
--
750828
-- Name: Sample Sample_containerId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: sample_handling
751829
--

src/scaup/crud/containers.py

Lines changed: 35 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
from fastapi import HTTPException, status
22
from lims_utils.models import ProposalReference
33
from sqlalchemy import func, insert, select, update
4+
from sqlalchemy.orm import aliased
45

5-
from ..models.containers import ContainerIn, OptionalContainer
6+
from ..models.containers import ContainerIn, ContainerOut, OptionalContainer
67
from ..models.inner_db.tables import Container, Sample, Shipment
78
from ..utils.crud import assert_not_booked, edit_item
89
from ..utils.database import inner_db, paginate
@@ -24,6 +25,39 @@ def create_container(params: ContainerIn, shipmentId: int | None = None):
2425
return container
2526

2627

28+
def get_container(container_id: int):
    """Return a validated container, resolving storage for internal ones.

    Non-internal containers are returned immediately. Internal containers
    are stored inside transport containers rather than storage dewars, so
    the top level storage container is located by walking up the parent
    chain with a recursive CTE, and its id is written to
    ``internalStorageContainer`` on the returned model.
    """
    row = inner_db.session.scalar(
        select(Container).filter(Container.id == container_id)
    )
    result = ContainerOut.model_validate(row)

    # Internal containers are not in storage dewars/containers, but in
    # transport containers; only they need the recursive lookup below.
    if not result.isInternal:
        return result

    # Anchor the recursion at the parent, saving one recursion layer.
    hierarchy = (
        select(Container.parentId, Container.topLevelContainerId)
        .filter(Container.id == result.parentId)
        .cte(name="anchor_member", recursive=True)
    )

    previous_level = hierarchy.alias("anchor_member_alias")
    parent_container = aliased(Container)

    # Recurse upwards while no top level container has been reached yet.
    hierarchy = hierarchy.union_all(
        select(parent_container.parentId, parent_container.topLevelContainerId)
        .filter(parent_container.id == previous_level.c.parentId)
        .filter(previous_level.c.topLevelContainerId.is_(None))
    )

    # The sole row with a non-null topLevelContainerId is the storage dewar.
    result.internalStorageContainer = inner_db.session.scalar(
        select(hierarchy.c.topLevelContainerId).filter(
            hierarchy.c.topLevelContainerId.is_not(None)
        )
    )
    return result
59+
60+
2761
def get_containers(
2862
limit: int,
2963
page: int,

src/scaup/crud/samples.py

Lines changed: 38 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -5,16 +5,19 @@
55
from lims_utils.models import Paged, ProposalReference
66
from sqlalchemy import and_, insert, select
77

8-
from ..models.inner_db.tables import Container, Sample, Shipment
8+
from ..models.inner_db.tables import Container, Sample, SampleParentChild, Shipment
99
from ..models.samples import OptionalSample, SampleIn, SampleOut
10+
from ..utils.config import Config
1011
from ..utils.crud import assert_not_booked, edit_item
11-
from ..utils.database import inner_db, paginate, unravel
12+
from ..utils.database import inner_db, paginate
1213
from ..utils.external import ExternalRequest
1314
from ..utils.session import retry_if_exists
1415

1516

1617
def _get_protein(proteinId: int, token):
17-
upstream_compound = ExternalRequest.request(token=token, url=f"/proteins/{proteinId}")
18+
upstream_compound = ExternalRequest.request(
19+
token=token, url=f"/proteins/{proteinId}"
20+
)
1821

1922
if upstream_compound.status_code != 200:
2023
app_logger.error(
@@ -79,6 +82,16 @@ def create_sample(shipmentId: int, params: SampleIn, token: str):
7982
],
8083
).all()
8184

85+
if params.parents:
86+
inner_db.session.execute(
87+
insert(SampleParentChild),
88+
[
89+
{"childId": child.id, "parentId": parent}
90+
for child in samples
91+
for parent in params.parents
92+
],
93+
)
94+
8295
inner_db.session.commit()
8396
return Paged(items=samples, total=params.copies, page=0, limit=params.copies)
8497

@@ -103,13 +116,13 @@ def get_samples(
103116
):
104117
query = (
105118
select(
106-
*unravel(Sample),
107-
Container.name.label("parent"),
119+
Sample,
120+
Container.name.label("containerName"),
108121
Shipment.name.label("parentShipmentName"),
109122
)
110123
.select_from(Shipment)
111124
.join(Sample, Sample.shipmentId == Shipment.id)
112-
.join(Container, isouter=True)
125+
.join(Container, Container.id == Sample.containerId, isouter=True)
113126
)
114127

115128
if shipment_id:
@@ -133,34 +146,42 @@ def get_samples(
133146
query = query.filter(Container.isInternal.is_not(True))
134147

135148
query = query.order_by(Container.name, Container.location, Sample.location)
136-
samples = paginate(query, limit, page, slow_count=False)
149+
samples = paginate(query, limit, page, slow_count=True, scalar=True)
137150

138151
if ignore_external or token is None:
139152
return samples
140153

141-
ext_shipment_id = inner_db.session.scalar(select(Shipment.externalId).filter(Shipment.id == shipment_id))
154+
ext_shipment_id = inner_db.session.scalar(
155+
select(Shipment.externalId).filter(Shipment.id == shipment_id)
156+
)
142157

143158
if ext_shipment_id is None:
144159
return samples
145160

146-
ext_samples = ExternalRequest.request(token, method="GET", url=f"/shipments/{ext_shipment_id}/samples")
161+
ext_samples = ExternalRequest.request(
162+
Config.ispyb_api.jwt, method="GET", url=f"/shipments/{ext_shipment_id}/samples?limit=100"
163+
)
147164

148165
if ext_samples.status_code != 200:
149-
app_logger.warning("Expeye returned %i: %s", ext_samples.status_code, ext_samples.text)
166+
app_logger.warning(
167+
"Expeye returned %i: %s", ext_samples.status_code, ext_samples.text
168+
)
150169
return samples
151170

171+
validated_samples = Paged[SampleOut].model_validate(samples, from_attributes=True)
172+
152173
for ext_sample in ext_samples.json()["items"]:
153174
if ext_sample["dataCollectionGroupId"]:
154175
try:
155-
i, sample = next(
156-
(i, sample)
157-
for i, sample in enumerate(samples.items)
176+
i = next(
177+
i
178+
for i, sample in enumerate(validated_samples.items)
158179
if sample.externalId == ext_sample["blSampleId"]
159180
)
160-
new_sample = SampleOut.model_validate(sample, from_attributes=True)
161-
new_sample.dataCollectionGroupId = ext_sample["dataCollectionGroupId"]
162-
samples.items[i] = new_sample
181+
validated_samples.items[i].dataCollectionGroupId = ext_sample[
182+
"dataCollectionGroupId"
183+
]
163184
except StopIteration:
164185
pass
165186

166-
return samples
187+
return validated_samples

0 commit comments

Comments
 (0)