
Commit c3e03d8 (parent: ed99d47)

More moving shared endpoints

File tree

    src/murfey/client/multigrid_control.py
    src/murfey/server/api/__init__.py
    src/murfey/server/api/session_control.py
    src/murfey/server/api/session_info.py
    src/murfey/server/api/shared.py

5 files changed: +107 −77 lines

src/murfey/client/multigrid_control.py

Lines changed: 15 additions & 10 deletions
@@ -20,6 +20,11 @@
 from murfey.client.rsync import RSyncer, RSyncerUpdate, TransferResult
 from murfey.client.tui.screens import determine_default_destination
 from murfey.client.watchdir import DirWatcher
+from murfey.server.api.file_manip import router as file_manip_router
+from murfey.server.api.session_control import router as session_router
+from murfey.server.api.workflow import router as workflow_router
+from murfey.server.api.workflow import spa_router as workflow_spa_router
+from murfey.server.api.workflow import tomo_router as workflow_tomo_router
 from murfey.util import posix_path
 from murfey.util.client import capture_post, get_machine_config_client

@@ -114,7 +119,7 @@ async def dormancy_check(self):
         ):
             async with aiohttp.ClientSession() as clientsession:
                 async with clientsession.delete(
-                    f"{self._environment.url.geturl()}/sessions/{self.session_id}",
+                    f"{self._environment.url.geturl()}{session_router.url_path_for('remove_session', session_id=self.session_id)}",
                     json={"access_token": self.token, "token_type": "bearer"},
                 ) as response:
                     success = response.status == 200

@@ -153,7 +158,7 @@ def _start_rsyncer_multigrid(
         log.info(f"starting multigrid rsyncer: {source}")
         destination_overrides = destination_overrides or {}
         machine_data = requests.get(
-            f"{self._environment.url.geturl()}/instruments/{self.instrument_name}/machine"
+            f"{self._environment.url.geturl()}{session_router.url_path_for('machine_info_by_instrument', instrument_name=self.instrument_name)}"
         ).json()
         if destination_overrides.get(source):
             destination = (

@@ -428,7 +433,7 @@ def _start_dc(self, json, from_form: bool = False):
             log.info("Registering tomography processing parameters")
             if self._environment.data_collection_parameters.get("num_eer_frames"):
                 eer_response = requests.post(
-                    f"{str(self._environment.url.geturl())}/visits/{self._environment.visit}/{self._environment.murfey_session}/eer_fractionation_file",
+                    f"{str(self._environment.url.geturl())}{file_manip_router.url_path_for('write_eer_fractionation_file', visit_name=self._environment.visit, session_id=self._environment.murfey_session)}",
                     json={
                         "num_frames": self._environment.data_collection_parameters[
                             "num_eer_frames"

@@ -445,17 +450,17 @@ def _start_dc(self, json, from_form: bool = False):
                 eer_fractionation_file = eer_response.json()["eer_fractionation_file"]
                 json.update({"eer_fractionation_file": eer_fractionation_file})
             capture_post(
-                f"{self._environment.url.geturl()}/sessions/{self._environment.murfey_session}/tomography_processing_parameters",
+                f"{self._environment.url.geturl()}{workflow_tomo_router.url_path_for('register_tomo_proc_params', session_id=self._environment.murfey_session)}",
                 json=json,
             )
             capture_post(
-                f"{self._environment.url.geturl()}/visits/{self._environment.visit}/{self._environment.murfey_session}/flush_tomography_processing",
+                f"{self._environment.url.geturl()}{workflow_tomo_router.url_path_for('flush_tomography_processing', visit_name=self._environment.visit, session_id=self._environment.murfey_session)}",
                 json={"rsync_source": str(source)},
             )
             log.info("Tomography processing flushed")

         elif isinstance(context, SPAModularContext):
-            url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group"
+            url = f"{str(self._environment.url.geturl())}{workflow_router.url_path_for('register_dc_group', visit_name=self._environment.visit, session_id=self.session_id)}"
             dcg_data = {
                 "experiment_type": "single particle",
                 "experiment_type_id": 37,

@@ -494,7 +499,7 @@ def _start_dc(self, json, from_form: bool = False):
                 "phase_plate": json.get("phase_plate", False),
             }
             capture_post(
-                f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/start_data_collection",
+                f"{str(self._environment.url.geturl())}{workflow_spa_router.url_path_for('start_dc', visit_name=self._environment.visit, session_id=self.session_id)}",
                 json=data,
             )
             for recipe in (

@@ -505,7 +510,7 @@ def _start_dc(self, json, from_form: bool = False):
                 "em-spa-refine",
             ):
                 capture_post(
-                    f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_processing_job",
+                    f"{str(self._environment.url.geturl())}{workflow_router.url_path_for('register_proc', visit_name=self._environment.visit, session_id=self.session_id)}",
                     json={
                         "tag": str(source),
                         "source": str(source),

@@ -514,7 +519,7 @@ def _start_dc(self, json, from_form: bool = False):
                 )
             log.info(f"Posting SPA processing parameters: {json}")
             response = capture_post(
-                f"{self._environment.url.geturl()}/sessions/{self.session_id}/spa_processing_parameters",
+                f"{self._environment.url.geturl()}{workflow_router.url_path_for('register_spa_proc_params', session_id=self.session_id)}",
                 json={
                     **{k: None if v == "None" else v for k, v in json.items()},
                     "tag": str(source),

@@ -523,7 +528,7 @@ def _start_dc(self, json, from_form: bool = False):
             if response and not str(response.status_code).startswith("2"):
                 log.warning(f"{response.reason}")
             capture_post(
-                f"{self._environment.url.geturl()}/visits/{self._environment.visit}/{self.session_id}/flush_spa_processing",
+                f"{self._environment.url.geturl()}{workflow_spa_router.url_path_for('flush_spa_processing', visit_name=self._environment.visit, session_id=self.session_id)}",
                 json={"tag": str(source)},
             )
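
Note: the client-side edits above swap hand-written URL strings for paths resolved from the server routers. As a quick illustration of the mechanism (a toy router and endpoint, not Murfey's actual session_control router), FastAPI/Starlette names each route after its endpoint function by default, so url_path_for can rebuild the concrete path from that name plus the path parameters:

from fastapi import APIRouter

router = APIRouter()

@router.delete("/sessions/{session_id}")
def remove_session(session_id: int):
    # body irrelevant here; only the route's name and path template matter
    return None

# Fill the path template registered above with concrete parameters
path = router.url_path_for("remove_session", session_id=42)
print(path)  # -> /sessions/42

The client then prepends the server's base URL (f"{base_url}{path}"), which keeps the client requests in step with the server-side route definitions instead of duplicating the path strings.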

src/murfey/server/api/__init__.py

Lines changed: 0 additions & 65 deletions
@@ -26,7 +26,6 @@
 )
 from murfey.server.api.auth import MurfeySessionID, validate_token
 from murfey.server.murfey_db import murfey_db
-from murfey.util import safe_run
 from murfey.util.db import (
     ClientEnvironment,
     DataCollection,

@@ -424,70 +423,6 @@ async def send_murfey_message(instrument_name: str, msg: RegistrationMessage):
     )


-@router.delete("/sessions/{session_id}")
-def remove_session_by_id(session_id: MurfeySessionID, db=murfey_db):
-    session = db.exec(select(Session).where(Session.id == session_id)).one()
-    sessions_for_visit = db.exec(
-        select(Session).where(Session.visit == session.visit)
-    ).all()
-    # Don't remove prometheus metrics if there are other sessions using them
-    if len(sessions_for_visit) == 1:
-        safe_run(
-            prom.monitoring_switch.remove,
-            args=(session.visit,),
-            label="monitoring_switch",
-        )
-    rsync_instances = db.exec(
-        select(RsyncInstance).where(RsyncInstance.session_id == session_id)
-    ).all()
-    for ri in rsync_instances:
-        safe_run(
-            prom.seen_files.remove,
-            args=(ri.source, session.visit),
-            label="seen_files",
-        )
-        safe_run(
-            prom.transferred_files.remove,
-            args=(ri.source, session.visit),
-            label="transferred_files",
-        )
-        safe_run(
-            prom.transferred_files_bytes.remove,
-            args=(ri.source, session.visit),
-            label="transferred_files_bytes",
-        )
-        safe_run(
-            prom.seen_data_files.remove,
-            args=(ri.source, session.visit),
-            label="seen_data_files",
-        )
-        safe_run(
-            prom.transferred_data_files.remove,
-            args=(ri.source, session.visit),
-            label="transferred_data_files",
-        )
-        safe_run(
-            prom.transferred_data_files_bytes.remove,
-            args=(ri.source, session.visit),
-            label="transferred_data_file_bytes",
-        )
-    collected_ids = db.exec(
-        select(DataCollectionGroup, DataCollection, ProcessingJob)
-        .where(DataCollectionGroup.session_id == session_id)
-        .where(DataCollection.dcg_id == DataCollectionGroup.id)
-        .where(ProcessingJob.dc_id == DataCollection.id)
-    ).all()
-    for c in collected_ids:
-        safe_run(
-            prom.preprocessed_movies.remove,
-            args=(c[2].id,),
-            label="preprocessed_movies",
-        )
-    db.delete(session)
-    db.commit()
-    return
-
-
 @router.post("/visits/{year}/{visit_name}/{session_id}/make_milling_gif")
 async def make_gif(
     year: int,

src/murfey/server/api/session_control.py

Lines changed: 9 additions & 1 deletion
@@ -8,7 +8,10 @@
 import murfey.server.ispyb
 from murfey.server import _transport_object
 from murfey.server.api.auth import MurfeySessionID, validate_token
-from murfey.server.api.shared import get_machine_config_for_instrument
+from murfey.server.api.shared import (
+    get_machine_config_for_instrument,
+    remove_session_by_id,
+)
 from murfey.server.murfey_db import murfey_db
 from murfey.util.config import MachineConfig
 from murfey.util.db import (

@@ -67,6 +70,11 @@ def link_client_to_session(
     return sid


+@router.delete("/sessions/{session_id}")
+def remove_session(session_id: MurfeySessionID, db=murfey_db):
+    remove_session_by_id(session_id, db)
+
+
 @router.post("/sessions/{session_id}/successful_processing")
 def register_processing_success_in_ispyb(
     session_id: MurfeySessionID, db=murfey.server.ispyb.DB, murfey_db=murfey_db

src/murfey/server/api/session_info.py

Lines changed: 9 additions & 1 deletion
@@ -11,7 +11,10 @@
 import murfey.server.ispyb
 from murfey.server import sanitise, templates
 from murfey.server.api.auth import MurfeySessionID, validate_token
-from murfey.server.api.shared import get_machine_config_for_instrument
+from murfey.server.api.shared import (
+    get_machine_config_for_instrument,
+    remove_session_by_id,
+)
 from murfey.server.murfey_db import murfey_db
 from murfey.util.config import MachineConfig, get_machine_config
 from murfey.util.db import (

@@ -117,6 +120,11 @@ async def get_sessions(db=murfey_db):
     return res


+@router.delete("/sessions/{session_id}")
+def remove_session(session_id: MurfeySessionID, db=murfey_db):
+    remove_session_by_id(session_id, db)
+
+
 @router.get("/sessions/{session_id}/upstream_visits")
 async def find_upstream_visits(session_id: MurfeySessionID, db=murfey_db):
     murfey_session = db.exec(select(Session).where(Session.id == session_id)).one()

src/murfey/server/api/shared.py

Lines changed: 74 additions & 0 deletions
@@ -2,7 +2,18 @@
 from pathlib import Path
 from typing import Optional

+from sqlmodel import select
+
+import murfey.server.prometheus as prom
+from murfey.util import safe_run
 from murfey.util.config import MachineConfig, from_file, settings
+from murfey.util.db import (
+    DataCollection,
+    DataCollectionGroup,
+    ProcessingJob,
+    RsyncInstance,
+    Session,
+)


 @lru_cache(maxsize=5)

@@ -12,3 +23,66 @@ def get_machine_config_for_instrument(instrument_name: str) -> Optional[MachineConfig]:
             instrument_name
         ]
     return None
+
+
+def remove_session_by_id(session_id: int, db):
+    session = db.exec(select(Session).where(Session.id == session_id)).one()
+    sessions_for_visit = db.exec(
+        select(Session).where(Session.visit == session.visit)
+    ).all()
+    # Don't remove prometheus metrics if there are other sessions using them
+    if len(sessions_for_visit) == 1:
+        safe_run(
+            prom.monitoring_switch.remove,
+            args=(session.visit,),
+            label="monitoring_switch",
+        )
+    rsync_instances = db.exec(
+        select(RsyncInstance).where(RsyncInstance.session_id == session_id)
+    ).all()
+    for ri in rsync_instances:
+        safe_run(
+            prom.seen_files.remove,
+            args=(ri.source, session.visit),
+            label="seen_files",
+        )
+        safe_run(
+            prom.transferred_files.remove,
+            args=(ri.source, session.visit),
+            label="transferred_files",
+        )
+        safe_run(
+            prom.transferred_files_bytes.remove,
+            args=(ri.source, session.visit),
+            label="transferred_files_bytes",
+        )
+        safe_run(
+            prom.seen_data_files.remove,
+            args=(ri.source, session.visit),
+            label="seen_data_files",
+        )
+        safe_run(
+            prom.transferred_data_files.remove,
+            args=(ri.source, session.visit),
+            label="transferred_data_files",
+        )
+        safe_run(
+            prom.transferred_data_files_bytes.remove,
+            args=(ri.source, session.visit),
+            label="transferred_data_file_bytes",
+        )
+    collected_ids = db.exec(
+        select(DataCollectionGroup, DataCollection, ProcessingJob)
+        .where(DataCollectionGroup.session_id == session_id)
+        .where(DataCollection.dcg_id == DataCollectionGroup.id)
+        .where(ProcessingJob.dc_id == DataCollection.id)
+    ).all()
+    for c in collected_ids:
+        safe_run(
+            prom.preprocessed_movies.remove,
+            args=(c[2].id,),
+            label="preprocessed_movies",
+        )
+    db.delete(session)
+    db.commit()
+    return
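
Note: together with the two router wrappers above, the new shared.py helper gives each API module a thin endpoint that delegates to a single implementation. A minimal sketch of that shape, with toy names and an in-memory dict standing in for the Murfey database (not the project's real modules):

from fastapi import APIRouter, FastAPI

fake_db: dict[int, str] = {1: "cm12345-1", 2: "cm12345-2"}

# shared.py equivalent: a plain function with no router decorator attached
def remove_session_by_id(session_id: int, db: dict) -> None:
    # stand-in for the real cleanup (prometheus metric removal, db.delete/commit)
    db.pop(session_id, None)

# session_control.py / session_info.py equivalent: a thin endpoint wrapper
router = APIRouter()

@router.delete("/sessions/{session_id}")
def remove_session(session_id: int):
    remove_session_by_id(session_id, fake_db)

app = FastAPI()
app.include_router(router)

Because the helper itself carries no route decorator, both session_control.py and session_info.py can register their own DELETE /sessions/{session_id} route (as in the hunks above) without duplicating the cleanup logic, and the client resolves the path via session_router.url_path_for('remove_session', ...).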
