Skip to content

Commit 4da0bc9

Browse files
committed
Merged recent changes from 'main' branch and resolved conflicts
2 parents 21e96ce + f9739aa commit 4da0bc9

File tree

14 files changed

+225
-375
lines changed

14 files changed

+225
-375
lines changed

src/murfey/client/contexts/spa.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -570,7 +570,7 @@ def _position_analysis(
570570
grid_square = _grid_square_from_file(transferred_file)
571571
grid_square_metadata_file = _grid_square_metadata_file(
572572
transferred_file,
573-
machine_config["data_directories"],
573+
[Path(p) for p in machine_config["data_directories"]],
574574
environment.visit,
575575
grid_square,
576576
)
@@ -921,6 +921,7 @@ def _register_processing_job(
921921
)
922922
msg: Dict[str, Any] = {
923923
"tag": tag,
924+
"source": tag,
924925
"recipe": "ispyb-relion",
925926
"parameters": {
926927
"acquisition_software": parameters["acquisition_software"],

src/murfey/client/contexts/spa_metadata.py

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44

55
import requests
66
import xmltodict
7-
from PIL import Image
87

98
from murfey.client.context import Context
109
from murfey.client.contexts.spa import _get_grid_square_atlas_positions, _get_source
@@ -85,17 +84,9 @@ def post_transfer(
8584
atlas_original_pixel_size = atlas_xml_data["MicroscopeImage"][
8685
"SpatialScale"
8786
]["pixelSize"]["x"]["numericValue"]
88-
readout_width = float(
89-
atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][
90-
"numericValue"
91-
]
92-
)
9387

9488
# need to calculate the pixel size of the downscaled image
95-
atlas_im = Image.open(atlas_xml_path.with_suffix(".jpg"))
96-
atlas_pixel_size = atlas_original_pixel_size * (
97-
readout_width / atlas_im.width
98-
)
89+
atlas_pixel_size = atlas_original_pixel_size * 7.8
9990

10091
source = _get_source(
10192
visitless_path.parent / "Images-Disc1" / visitless_path.name,

src/murfey/client/contexts/tomo.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -375,6 +375,7 @@ def _add_tilt(
375375
proc_url,
376376
{
377377
"tag": tilt_series,
378+
"source": str(self._basepath),
378379
"recipe": "em-tomo-preprocess",
379380
"experiment_type": "tomography",
380381
},
@@ -385,6 +386,7 @@ def _add_tilt(
385386
proc_url,
386387
{
387388
"tag": tilt_series,
389+
"source": str(self._basepath),
388390
"recipe": "em-tomo-align",
389391
"experiment_type": "tomography",
390392
},
@@ -396,6 +398,7 @@ def _add_tilt(
396398
proc_url,
397399
json={
398400
"tag": tilt_series,
401+
"source": str(self._basepath),
399402
"recipe": "em-tomo-preprocess",
400403
"experiment_type": "tomography",
401404
},
@@ -404,6 +407,7 @@ def _add_tilt(
404407
proc_url,
405408
json={
406409
"tag": tilt_series,
410+
"source": str(self._basepath),
407411
"recipe": "em-tomo-align",
408412
"experiment_type": "tomography",
409413
},

src/murfey/client/multigrid_control.py

Lines changed: 80 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -366,16 +366,9 @@ def _start_dc(self, json, from_form: bool = False):
366366
f"{self._environment.url.geturl()}/clients/{self._environment.client_id}/tomography_processing_parameters",
367367
json=json,
368368
)
369+
369370
source = Path(json["source"])
370-
self._environment.listeners["data_collection_group_ids"] = {
371-
context._flush_data_collections
372-
}
373-
self._environment.listeners["data_collection_ids"] = {
374-
context._flush_processing_job
375-
}
376-
self._environment.listeners["autoproc_program_ids"] = {
377-
context._flush_preprocess
378-
}
371+
379372
self._environment.id_tag_registry["data_collection_group"].append(
380373
str(source)
381374
)
@@ -386,12 +379,85 @@ def _start_dc(self, json, from_form: bool = False):
386379
"tag": str(source),
387380
}
388381
requests.post(url, json=dcg_data)
382+
383+
data = {
384+
"voltage": json["voltage"],
385+
"pixel_size_on_image": json["pixel_size_on_image"],
386+
"experiment_type": json["experiment_type"],
387+
"image_size_x": json["image_size_x"],
388+
"image_size_y": json["image_size_y"],
389+
"file_extension": json["file_extension"],
390+
"acquisition_software": json["acquisition_software"],
391+
"image_directory": str(self._environment.default_destinations[source]),
392+
"tag": json["tilt_series_tag"],
393+
"source": str(source),
394+
"magnification": json["magnification"],
395+
"total_exposed_dose": json.get("total_exposed_dose"),
396+
"c2aperture": json.get("c2aperture"),
397+
"exposure_time": json.get("exposure_time"),
398+
"slit_width": json.get("slit_width"),
399+
"phase_plate": json.get("phase_plate", False),
400+
}
401+
capture_post(
402+
f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self._environment.murfey_session}/start_data_collection",
403+
json=data,
404+
)
405+
for recipe in ("em-tomo-preprocess", "em-tomo-align"):
406+
capture_post(
407+
f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self._environment.murfey_session}/register_processing_job",
408+
json={
409+
"tag": json["tilt_series_tag"],
410+
"source": str(source),
411+
"recipe": recipe,
412+
},
413+
)
414+
log.info("Registering tomography processing parameters")
415+
if self._environment.data_collection_parameters.get("num_eer_frames"):
416+
eer_response = requests.post(
417+
f"{str(self._environment.url.geturl())}/visits/{self._environment.visit}/{self._environment.murfey_session}/eer_fractionation_file",
418+
json={
419+
"num_frames": self._environment.data_collection_parameters[
420+
"num_eer_frames"
421+
],
422+
"fractionation": self._environment.data_collection_parameters[
423+
"eer_fractionation"
424+
],
425+
"dose_per_frame": self._environment.data_collection_parameters[
426+
"dose_per_frame"
427+
],
428+
"fractionation_file_name": "eer_fractionation_tomo.txt",
429+
},
430+
)
431+
eer_fractionation_file = eer_response.json()["eer_fractionation_file"]
432+
json.update({"eer_fractionation_file": eer_fractionation_file})
433+
requests.post(
434+
f"{self._environment.url.geturl()}/sessions/{self._environment.murfey_session}/tomography_preprocessing_parameters",
435+
json=json,
436+
)
437+
context._flush_data_collections()
438+
context._flush_processing_jobs()
439+
capture_post(
440+
f"{self._environment.url.geturl()}/visits/{self._environment.visit}/{self._environment.murfey_session}/flush_tomography_processing",
441+
json={"rsync_source": str(source)},
442+
)
443+
log.info("tomography processing flushed")
444+
389445
elif isinstance(context, SPAContext) or isinstance(context, SPAModularContext):
390446
url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group"
391447
dcg_data = {
392448
"experiment_type": "single particle",
393449
"experiment_type_id": 37,
394450
"tag": str(source),
451+
"atlas": (
452+
str(self._environment.samples[source].atlas)
453+
if self._environment.samples.get(source)
454+
else ""
455+
),
456+
"sample": (
457+
self._environment.samples[source].sample
458+
if self._environment.samples.get(source)
459+
else None
460+
),
395461
}
396462
capture_post(url, json=dcg_data)
397463
if from_form:
@@ -428,7 +494,11 @@ def _start_dc(self, json, from_form: bool = False):
428494
):
429495
capture_post(
430496
f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_processing_job",
431-
json={"tag": str(source), "recipe": recipe},
497+
json={
498+
"tag": str(source),
499+
"source": str(source),
500+
"recipe": recipe,
501+
},
432502
)
433503
log.info(f"Posting SPA processing parameters: {json}")
434504
response = capture_post(

src/murfey/client/tui/app.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -516,7 +516,11 @@ def _start_dc(self, json, from_form: bool = False):
516516
for recipe in ("em-tomo-preprocess", "em-tomo-align"):
517517
capture_post(
518518
f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_processing_job",
519-
json={"tag": json["tilt_series_tag"], "recipe": recipe},
519+
json={
520+
"tag": json["tilt_series_tag"],
521+
"source": str(source),
522+
"recipe": recipe,
523+
},
520524
)
521525
log.info("Registering tomography processing parameters")
522526
if self.app._environment.data_collection_parameters.get("num_eer_frames"):
@@ -600,7 +604,11 @@ def _start_dc(self, json, from_form: bool = False):
600604
):
601605
capture_post(
602606
f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_processing_job",
603-
json={"tag": str(source), "recipe": recipe},
607+
json={
608+
"tag": str(source),
609+
"source": str(source),
610+
"recipe": recipe,
611+
},
604612
)
605613
log.info(f"Posting SPA processing parameters: {json}")
606614
response = capture_post(

src/murfey/client/tui/screens.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -717,7 +717,11 @@ def on_button_pressed(self, event: Button.Pressed):
717717
if self._switch_status:
718718
self.app.install_screen(
719719
DirectorySelection(
720-
[p for p in machine_data.get("data_directories", []) if p.exists()]
720+
[
721+
p
722+
for p in machine_data.get("data_directories", [])
723+
if Path(p).exists()
724+
]
721725
),
722726
"directory-select",
723727
)

src/murfey/server/__init__.py

Lines changed: 25 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -11,13 +11,12 @@
1111
from importlib.resources import files
1212
from pathlib import Path
1313
from threading import Thread
14-
from typing import Any, Dict, List, NamedTuple, Tuple
14+
from typing import Any, Dict, List, Literal, NamedTuple, Tuple
1515

1616
import graypy
1717
import mrcfile
1818
import numpy as np
1919
import uvicorn
20-
import workflows
2120
from backports.entry_points_selectable import entry_points
2221
from fastapi import Request
2322
from fastapi.templating import Jinja2Templates
@@ -42,6 +41,7 @@
4241
from sqlalchemy.orm.exc import ObjectDeletedError
4342
from sqlmodel import Session, create_engine, select
4443
from werkzeug.utils import secure_filename
44+
from workflows.transport.pika_transport import PikaTransport
4545

4646
import murfey
4747
import murfey.server.ispyb
@@ -225,6 +225,7 @@ def respond_with_template(
225225

226226

227227
def run():
228+
# Set up argument parser
228229
parser = argparse.ArgumentParser(description="Start the Murfey server")
229230
parser.add_argument(
230231
"--host",
@@ -273,28 +274,29 @@ def run():
273274
help="Increase logging output verbosity",
274275
default=0,
275276
)
277+
# Parse and separate known and unknown args
278+
args, unknown = parser.parse_known_args()
276279

280+
# Load the security configuration
277281
security_config = get_security_config()
278-
# setup logging
282+
283+
# Set up GrayLog handler if provided in the configuration
279284
if security_config.graylog_host:
280285
handler = graypy.GELFUDPHandler(
281286
security_config.graylog_host, security_config.graylog_port, level_names=True
282287
)
283288
root_logger = logging.getLogger()
284289
root_logger.addHandler(handler)
285-
286290
# Install a log filter to all existing handlers.
287291
LogFilter.install()
288292

289-
workflows.transport.load_configuration_file(security_config.rabbitmq_credentials)
290-
291-
args = parser.parse_args()
292-
293-
# Set up Zocalo connection
294293
if args.demo:
294+
# Run in demo mode with no connections set up
295295
os.environ["MURFEY_DEMO"] = "1"
296296
else:
297-
_set_up_transport(args.transport)
297+
# Load RabbitMQ configuration and set up the connection
298+
PikaTransport().load_configuration_file(security_config.rabbitmq_credentials)
299+
_set_up_transport("PikaTransport")
298300

299301
# Set up logging now that the desired verbosity is known
300302
_set_up_logging(quiet=args.quiet, verbosity=args.verbose)
@@ -393,7 +395,7 @@ def _set_up_logging(quiet: bool, verbosity: int):
393395
logging.getLogger(logger_name).setLevel(log_level)
394396

395397

396-
def _set_up_transport(transport_type):
398+
def _set_up_transport(transport_type: Literal["PikaTransport"]):
397399
global _transport_object
398400
_transport_object = TransportManager(transport_type)
399401

@@ -2471,19 +2473,16 @@ def _save_bfactor(message: dict, _db=murfey_db, demo: bool = False):
24712473
_transport_object.send(
24722474
"ispyb_connector",
24732475
{
2474-
"parameters": {
2475-
"ispyb_command": "buffer",
2476-
"buffer_lookup": {
2477-
"particle_classification_id": refined_class_uuid,
2478-
},
2479-
"buffer_command": {
2480-
"ispyb_command": "insert_particle_classification"
2481-
},
2482-
"program_id": message["program_id"],
2483-
"bfactor_fit_intercept": str(bfactor_fitting[1]),
2484-
"bfactor_fit_linear": str(bfactor_fitting[0]),
2476+
"ispyb_command": "buffer",
2477+
"buffer_lookup": {
2478+
"particle_classification_id": refined_class_uuid,
24852479
},
2486-
"content": {"dummy": "dummy"},
2480+
"buffer_command": {
2481+
"ispyb_command": "insert_particle_classification"
2482+
},
2483+
"program_id": message["program_id"],
2484+
"bfactor_fit_intercept": str(bfactor_fitting[1]),
2485+
"bfactor_fit_linear": str(bfactor_fitting[0]),
24872486
},
24882487
new_connection=True,
24892488
)
@@ -2639,7 +2638,9 @@ def feedback_callback(header: dict, message: dict) -> None:
26392638
cassetteSlot=message.get("sample"),
26402639
)
26412640
if _transport_object:
2642-
atlas_id = _transport_object.do_insert_atlas(atlas_record)
2641+
atlas_id = _transport_object.do_insert_atlas(atlas_record)[
2642+
"return_value"
2643+
]
26432644
murfey_dcg = db.DataCollectionGroup(
26442645
id=dcgid,
26452646
atlas_id=atlas_id,
@@ -2756,7 +2757,6 @@ def feedback_callback(header: dict, message: dict) -> None:
27562757
elif message["register"] == "processing_job":
27572758
murfey_session_id = message["session_id"]
27582759
logger.info("registering processing job")
2759-
assert isinstance(global_state["data_collection_ids"], dict)
27602760
dc = murfey_db.exec(
27612761
select(db.DataCollection, db.DataCollectionGroup)
27622762
.where(db.DataCollection.dcg_id == db.DataCollectionGroup.id)

0 commit comments

Comments (0)