
Commit 8c1cde1

Merged recent changes from 'main' branch
2 parents: 00502e4 + 51fd6ad

24 files changed: 306 additions, 455 deletions

.bumpversion.toml

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.16.6"
+current_version = "0.16.7"
 commit = true
 tag = true

.github/workflows/ci.yml

Lines changed: 18 additions & 2 deletions
@@ -29,6 +29,16 @@ jobs:
           - 3306:3306
         options: --health-cmd="healthcheck.sh --connect --innodb_initialized" --health-interval=10s --health-timeout=5s --health-retries=3

+      postgres:
+        image: postgres:latest
+        env:
+          POSTGRES_DB: murfey_test_db
+          POSTGRES_PASSWORD: psql_pwd
+          POSTGRES_USER: psql_user
+        ports:
+          - 5432:5432
+        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
+
     steps:
       - uses: actions/checkout@v4
       - name: Use Python ${{ matrix.python-version }}
@@ -57,7 +67,7 @@ jobs:
           docker run --detach --name rabbitmq -p 127.0.0.1:5672:5672 -p 127.0.0.1:15672:15672 test-rabbitmq
           docker container list -a

-      - name: Get database
+      - name: Get ispyb database
         uses: actions/download-artifact@v4
         with:
           name: database
@@ -74,7 +84,7 @@ jobs:
           mysql-version: "11.3"
           auto-start: false

-      - name: Set up test database
+      - name: Set up test ipsyb database
         run: |
           set -eu
           cp ".github/workflows/config/my.cnf" .my.cnf
@@ -103,6 +113,12 @@
         run: wget -t 10 -w 1 http://127.0.0.1:15672 -O -

       - name: Run tests
+        env:
+          POSTGRES_HOST: localhost
+          POSTGRES_PORT: 5432
+          POSTGRES_DB: murfey_test_db
+          POSTGRES_PASSWORD: psql_pwd
+          POSTGRES_USER: psql_user
         run: |
           export ISPYB_CREDENTIALS=".github/workflows/config/ispyb.cfg"
           PYTHONDEVMODE=1 pytest -v -ra --cov=murfey --cov-report=xml --cov-branch
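The new service and env block only make a PostgreSQL instance available to the job; the diff does not show how the test suite consumes the POSTGRES_* variables. A minimal sketch, assuming the tests build a SQLAlchemy connection URL from them (the function name and defaults below are illustrative, not murfey's actual fixture):

# Illustrative only: build a connection URL from the variables exported in
# the workflow above. Defaults mirror the CI values.
import os

from sqlalchemy import create_engine


def postgres_url() -> str:
    user = os.environ.get("POSTGRES_USER", "psql_user")
    password = os.environ.get("POSTGRES_PASSWORD", "psql_pwd")
    host = os.environ.get("POSTGRES_HOST", "localhost")
    port = os.environ.get("POSTGRES_PORT", "5432")
    db = os.environ.get("POSTGRES_DB", "murfey_test_db")
    return f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db}"


engine = create_engine(postgres_url())  # requires the psycopg2 driver to connect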

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ requires = [

 [project]
 name = "murfey"
-version = "0.16.6"
+version = "0.16.7"
 description = "Client-Server architecture hauling Cryo-EM data"
 readme = "README.md"
 keywords = [

src/murfey/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 from __future__ import annotations

-__version__ = "0.16.6"
+__version__ = "0.16.7"
 __supported_client_version__ = "0.16.6"

src/murfey/client/analyser.py

Lines changed: 4 additions & 22 deletions
@@ -16,7 +16,7 @@

 from murfey.client.context import Context
 from murfey.client.contexts.clem import CLEMContext
-from murfey.client.contexts.spa import SPAContext, SPAModularContext
+from murfey.client.contexts.spa import SPAModularContext
 from murfey.client.contexts.spa_metadata import SPAMetadataContext
 from murfey.client.contexts.tomo import TomographyContext
 from murfey.client.instance_environment import MurfeyInstanceEnvironment
@@ -159,23 +159,7 @@ def _find_context(self, file_path: Path) -> bool:
         if split_file_name[0].startswith("FoilHole"):
             if not self._context:
                 logger.info("Acquisition software: EPU")
-                if self._environment:
-                    try:
-                        cfg = get_machine_config_client(
-                            str(self._environment.url.geturl()),
-                            instrument_name=self._environment.instrument_name,
-                            demo=self._environment.demo,
-                        )
-                    except Exception as e:
-                        logger.error(f"Exception encountered: {e}")
-                        cfg = {}
-                else:
-                    cfg = {}
-                self._context = (
-                    SPAModularContext("epu", self._basepath)
-                    if cfg.get("modular_spa")
-                    else SPAContext("epu", self._basepath)
-                )
+                self._context = SPAModularContext("epu", self._basepath)
             self.parameters_model = ProcessingParametersSPA
             return True

@@ -325,8 +309,7 @@ def _analyse(self):
                         "form": dc_metadata,
                         "dependencies": (
                             spa_form_dependencies
-                            if isinstance(self._context, SPAContext)
-                            or isinstance(
+                            if isinstance(
                                 self._context, SPAModularContext
                             )
                             else {}
@@ -385,8 +368,7 @@ def _analyse(self):
                         "form": dc_metadata,
                         "dependencies": (
                             spa_form_dependencies
-                            if isinstance(self._context, SPAContext)
-                            or isinstance(
+                            if isinstance(
                                 self._context, SPAModularContext
                             )
                             else {}
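With the machine-config lookup removed, an EPU FoilHole movie always produces a SPAModularContext, which is why the dependency checks above only need the single isinstance test. A small illustrative sketch of the resulting behaviour (the basepath is hypothetical):

# Illustrative only: the analyser no longer consults get_machine_config_client
# for a "modular_spa" flag; EPU data is always handled by SPAModularContext.
from pathlib import Path

from murfey.client.contexts.spa import SPAModularContext

context = SPAModularContext("epu", Path("/hypothetical/session/dir"))
assert isinstance(context, SPAModularContext)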

src/murfey/client/contexts/spa.py

Lines changed: 5 additions & 106 deletions
@@ -66,13 +66,16 @@ def _grid_square_metadata_file(
             break
     else:
         raise ValueError(f"Could not determine grid square metadata path for {f}")
-    return (
+    metadata_file = (
         base_dir
         / visit
         / mid_dir.parent.parent.parent
         / "Metadata"
         / f"GridSquare_{grid_square}.dm"
     )
+    if not metadata_file.is_file():
+        logger.warning(f"Grid square metadata file {str(metadata_file)} does not exist")
+    return metadata_file


 def _get_source(file_path: Path, environment: MurfeyInstanceEnvironment) -> Path | None:
@@ -89,7 +92,7 @@ def _get_xml_list_index(key: str, xml_list: list) -> int:
     raise ValueError(f"Key not found in XML list: {key}")


-class _SPAContext(Context):
+class SPAModularContext(Context):
     user_params = [
         ProcessingParameter(
             "dose_per_frame",
@@ -130,7 +133,6 @@ def __init__(self, acquisition_software: str, basepath: Path):
         super().__init__("SPA", acquisition_software)
         self._basepath = basepath
         self._processing_job_stash: dict = {}
-        self._preprocessing_triggers: dict = {}
         self._foil_holes: Dict[int, List[int]] = {}

     def gather_metadata(
@@ -362,8 +364,6 @@ def gather_metadata(
         ) or True
         return metadata

-
-class SPAModularContext(_SPAContext):
     def _position_analysis(
         self,
         transferred_file: Path,
@@ -658,104 +658,3 @@ def _launch_spa_pipeline(
         url: str = "",
     ):
         return
-
-
-class SPAContext(_SPAContext):
-    def _register_data_collection(
-        self,
-        tag: str,
-        url: str,
-        data: dict,
-        environment: MurfeyInstanceEnvironment,
-    ):
-        logger.info(f"registering data collection with data {data}")
-        environment.id_tag_registry["data_collection"].append(tag)
-        image_directory = str(environment.default_destinations[Path(tag)])
-        json = {
-            "voltage": data["voltage"],
-            "pixel_size_on_image": data["pixel_size_on_image"],
-            "experiment_type": data["experiment_type"],
-            "image_size_x": data["image_size_x"],
-            "image_size_y": data["image_size_y"],
-            "file_extension": data["file_extension"],
-            "acquisition_software": data["acquisition_software"],
-            "image_directory": image_directory,
-            "tag": tag,
-            "source": tag,
-            "magnification": data["magnification"],
-            "total_exposed_dose": data.get("total_exposed_dose"),
-            "c2aperture": data.get("c2aperture"),
-            "exposure_time": data.get("exposure_time"),
-            "slit_width": data.get("slit_width"),
-            "phase_plate": data.get("phase_plate", False),
-        }
-        capture_post(url, json=json)
-
-    def post_transfer(
-        self,
-        transferred_file: Path,
-        environment: MurfeyInstanceEnvironment | None = None,
-        **kwargs,
-    ) -> bool:
-        return True
-
-    def _register_processing_job(
-        self,
-        tag: str,
-        environment: MurfeyInstanceEnvironment,
-        parameters: Dict[str, Any] | None = None,
-    ):
-        logger.info(f"registering processing job with parameters: {parameters}")
-        parameters = parameters or {}
-        environment.id_tag_registry["processing_job"].append(tag)
-        proc_url = f"{str(environment.url.geturl())}/visits/{environment.visit}/{environment.murfey_session}/register_processing_job"
-        machine_config = get_machine_config_client(
-            str(environment.url.geturl()),
-            instrument_name=environment.instrument_name,
-            demo=environment.demo,
-        )
-        image_directory = str(
-            Path(machine_config.get("rsync_basepath", "."))
-            / environment.default_destinations[Path(tag)]
-        )
-        if self._acquisition_software == "epu":
-            import_images = f"{Path(image_directory).resolve()}/GridSquare*/Data/*{parameters['file_extension']}"
-        else:
-            import_images = (
-                f"{Path(image_directory).resolve()}/*{parameters['file_extension']}"
-            )
-        msg: Dict[str, Any] = {
-            "tag": tag,
-            "source": tag,
-            "recipe": "ispyb-relion",
-            "parameters": {
-                "acquisition_software": parameters["acquisition_software"],
-                "voltage": parameters["voltage"],
-                "gain_ref": parameters["gain_ref"],
-                "dose_per_frame": parameters["dose_per_frame"],
-                "eer_grouping": parameters["eer_fractionation"],
-                "import_images": import_images,
-                "angpix": float(parameters["pixel_size_on_image"]) * 1e10,
-                "symmetry": parameters["symmetry"],
-                "boxsize": parameters["boxsize"],
-                "downscale": parameters["downscale"],
-                "small_boxsize": parameters["small_boxsize"],
-                "mask_diameter": parameters["mask_diameter"],
-                "use_cryolo": parameters["use_cryolo"],
-                "estimate_particle_diameter": parameters["estimate_particle_diameter"],
-            },
-        }
-        if parameters["particle_diameter"]:
-            msg["parameters"]["particle_diameter"] = parameters["particle_diameter"]
-        capture_post(proc_url, json=msg)
-
-    def _launch_spa_pipeline(
-        self,
-        tag: str,
-        jobid: int,
-        environment: MurfeyInstanceEnvironment,
-        url: str = "",
-    ):
-        environment.id_tag_registry["auto_proc_program"].append(tag)
-        data = {"job_id": jobid}
-        capture_post(url, json=data)
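The `_grid_square_metadata_file` change keeps the same return value but now warns when the expected `.dm` file is absent, rather than letting callers fail later with a less obvious error. A minimal sketch of the same construct-then-warn pattern, with illustrative argument names rather than the real murfey signature:

# Sketch of the construct-then-warn pattern introduced above; the function and
# argument names are illustrative.
import logging
from pathlib import Path

logger = logging.getLogger(__name__)


def expected_grid_square_metadata(
    base_dir: Path, visit: str, mid_dir: Path, grid_square: int
) -> Path:
    metadata_file = (
        base_dir
        / visit
        / mid_dir.parent.parent.parent
        / "Metadata"
        / f"GridSquare_{grid_square}.dm"
    )
    if not metadata_file.is_file():
        # Warn rather than raise: callers still get the expected path and can
        # decide how to handle the missing file.
        logger.warning(f"Grid square metadata file {metadata_file} does not exist")
    return metadata_file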

src/murfey/client/contexts/spa_metadata.py

Lines changed: 9 additions & 2 deletions
@@ -30,6 +30,7 @@ def _foil_hole_positions(xml_path: Path, grid_square: int) -> Dict[str, FoilHole
             required_key = key
             break
     if not required_key:
+        logger.warning(f"Required key not found for {str(xml_path)}")
         return {}
     foil_holes = {}
     for fh_block in serialization_array[required_key]:
@@ -149,10 +150,13 @@ def post_transfer(
                 atlas=Path(partial_path), sample=sample
             )
         url = f"{str(environment.url.geturl())}/visits/{environment.visit}/{environment.murfey_session}/register_data_collection_group"
+        dcg_tag = "/".join(
+            p for p in transferred_file.parent.parts if p != environment.visit
+        )
         dcg_data = {
             "experiment_type": "single particle",
             "experiment_type_id": 37,
-            "tag": str(source),
+            "tag": dcg_tag,
             "atlas": str(
                 _atlas_destination(environment, source, transferred_file)
                 / environment.samples[source].atlas
@@ -169,7 +173,7 @@
             capture_post(
                 f"{str(environment.url.geturl())}/sessions/{environment.murfey_session}/grid_square/{gs}",
                 json={
-                    "tag": str(source),
+                    "tag": dcg_tag,
                     "x_location": pos_data[0],
                     "y_location": pos_data[1],
                     "x_stage_position": pos_data[2],
@@ -186,6 +190,9 @@
             and environment
         ):
             gs_name = transferred_file.stem.split("_")[1]
+            logger.info(
+                f"Collecting foil hole positions for {str(transferred_file)} and grid square {int(gs_name)}"
+            )
             fh_positions = _foil_hole_positions(transferred_file, int(gs_name))
             source = _get_source(transferred_file, environment=environment)
             visitless_source = str(source).replace(f"/{environment.visit}", "")
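The data collection group tag is now derived from the transferred file's parent directory with the visit component stripped out, instead of the source path. A worked example of the dcg_tag construction, using a hypothetical relative transfer path and visit name:

# Hypothetical example of the dcg_tag construction above; the path and visit
# name are illustrative, not taken from a real session.
from pathlib import Path

visit = "cm12345-1"
transferred_file = Path("cm12345-1/Sample1/Atlas/Atlas_1.xml")

dcg_tag = "/".join(p for p in transferred_file.parent.parts if p != visit)
print(dcg_tag)  # -> "Sample1/Atlas"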
