Skip to content

Commit 3c6a31b

Browse files
committed
Merged recent changes from main branch
2 parents c543f6a + 208f4fb commit 3c6a31b

35 files changed

+961
-528
lines changed

.bumpclient.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[tool.bumpversion]
2-
current_version = "0.15.3"
2+
current_version = "0.15.5"
33
commit = true
44
tag = false
55

.bumpversion.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[tool.bumpversion]
2-
current_version = "0.15.3"
2+
current_version = "0.15.5"
33
commit = true
44
tag = true
55

.github/workflows/ci.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ jobs:
1717
python-version: ["3.9", "3.10", "3.11"]
1818
services:
1919
mariadb:
20-
image: mariadb:11.3.2 # released 2024-02-16
20+
image: mariadb:11.6.2 # released 2024-11-21
2121
# Pulls image from DockerHub
2222
# Docker images: https://hub.docker.com/_/mariadb
2323
# Previous version(s):
@@ -108,7 +108,7 @@ jobs:
108108
PYTHONDEVMODE=1 pytest -v -ra --cov=murfey --cov-report=xml --cov-branch
109109
110110
- name: Upload to Codecov
111-
uses: codecov/codecov-action@v4
111+
uses: codecov/codecov-action@v5
112112
with:
113113
name: ${{ matrix.python-version }}
114114
files: coverage.xml

.github/workflows/publish-version.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ jobs:
128128
name: package-distributions
129129
path: dist/
130130
- name: Sign the dists with Sigstore
131-
uses: sigstore/gh-action-sigstore-python@v2.1.1
131+
uses: sigstore/gh-action-sigstore-python@v3.0.0
132132
with:
133133
inputs: >-
134134
./dist/*.tar.gz

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ The packages included under the `[developer]` installation key contain some help
2929
- `pre-commit` - Allows for the installation and running of hooks to help with linting, formatting, and type checking your code.
3030
- `pytest` - Used in conjunction with test functions to evaluate the reliability of your code.
3131

32-
Instructions for setting up the database for Murfey to register files to can be found [here](src/murfey/server/MURFEY_DB.md).
32+
Instructions for setting up the database for Murfey to register files to can be found [here](src/murfey/server/README.md).
3333

3434
Finally, you may want to set up an ISPyB mock database server and a Zocalo
3535
development environment. The instructions for this are out of scope here.

pyproject.toml

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ requires = [
77

88
[project]
99
name = "murfey"
10-
version = "0.15.3"
10+
version = "0.15.5"
1111
description = "Client-Server architecture hauling Cryo-EM data"
1212
readme = "README.md"
1313
keywords = [
@@ -99,10 +99,12 @@ murfey = "murfey.client:run"
9999
[project.entry-points."murfey.config.extraction"]
100100
"murfey_machine" = "murfey.util.config:get_extended_machine_config"
101101
[project.entry-points."murfey.workflows"]
102-
"process_raw_lifs" = "murfey.workflows.clem.process_raw_lifs:zocalo_cluster_request"
103-
"process_raw_tiffs" = "murfey.workflows.clem.process_raw_tiffs:zocalo_cluster_request"
104-
"register_lif_preprocessing_result" = "murfey.workflows.clem.register_preprocessing_results:register_lif_preprocessing_result"
105-
"register_tiff_preprocessing_result" = "murfey.workflows.clem.register_preprocessing_results:register_tiff_preprocessing_result"
102+
"clem.align_and_merge" = "murfey.workflows.clem.align_and_merge:submit_cluster_request"
103+
"clem.process_raw_lifs" = "murfey.workflows.clem.process_raw_lifs:zocalo_cluster_request"
104+
"clem.process_raw_tiffs" = "murfey.workflows.clem.process_raw_tiffs:zocalo_cluster_request"
105+
"clem.register_align_and_merge_result" = "murfey.workflows.clem.register_align_and_merge_results:register_align_and_merge_result"
106+
"clem.register_lif_preprocessing_result" = "murfey.workflows.clem.register_preprocessing_results:register_lif_preprocessing_result"
107+
"clem.register_tiff_preprocessing_result" = "murfey.workflows.clem.register_preprocessing_results:register_tiff_preprocessing_result"
106108

107109
[tool.setuptools]
108110
package-dir = {"" = "src"}

src/murfey/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
from __future__ import annotations
22

3-
__version__ = "0.15.3"
4-
__supported_client_version__ = "0.15.3"
3+
__version__ = "0.15.5"
4+
__supported_client_version__ = "0.15.5"

src/murfey/cli/transfer.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -36,10 +36,8 @@ def run():
3636
murfey_url = urlparse(args.server, allow_fragments=False)
3737

3838
machine_data = requests.get(f"{murfey_url.geturl()}/machine").json()
39-
if Path(args.source or ".").resolve() in machine_data.data_directories.keys():
40-
console.print(
41-
f"[red]Source directory is the base directory for the {machine_data.data_directories[Path(args.source or '.').resolve()]}, exiting"
42-
)
39+
if Path(args.source or ".").resolve() in machine_data.data_directories:
40+
console.print("[red]Source directory is the base directory, exiting")
4341
return
4442

4543
cmd = [

src/murfey/client/__init__.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -215,12 +215,6 @@ def run():
215215
default=False,
216216
help="Remove source files immediately after their transfer",
217217
)
218-
parser.add_argument(
219-
"--relax",
220-
action="store_true",
221-
default=False,
222-
help="Relax the condition that the source directory needs to be recognised from the configuration",
223-
)
224218
parser.add_argument(
225219
"--name",
226220
type=str,
@@ -344,7 +338,6 @@ def run():
344338
gain_ref=gain_ref,
345339
redirected_logger=rich_handler,
346340
force_mdoc_metadata=not args.ignore_mdoc_metadata,
347-
strict=not args.relax,
348341
processing_enabled=machine_data.get("processing_enabled", True),
349342
skip_existing_processing=args.skip_existing_processing,
350343
)

src/murfey/client/analyser.py

Lines changed: 9 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,6 @@ def __init__(
5757
self._limited = limited
5858
self._experiment_type = ""
5959
self._acquisition_software = ""
60-
self._role = ""
6160
self._extension: str = ""
6261
self._unseen_xml: list = []
6362
self._context: Context | None = None
@@ -126,14 +125,12 @@ def _find_context(self, file_path: Path) -> bool:
126125
in the Context classes themselves.
127126
"""
128127
if "atlas" in file_path.parts:
129-
self._role = "detector"
130128
self._context = SPAMetadataContext("epu", self._basepath)
131129
return True
132130

133131
# CLEM workflow checks
134132
# Look for LIF and XLIF files
135133
if file_path.suffix in (".lif", ".xlif"):
136-
self._role = "detector"
137134
self._context = CLEMContext("leica", self._basepath)
138135
return True
139136
# Look for TIFF files associated with CLEM workflow
@@ -152,7 +149,6 @@ def _find_context(self, file_path: Path) -> bool:
152149
).get("analyse_created_directories", [])
153150
)
154151
if created_directories.intersection(set(file_path.parts)):
155-
self._role = "detector"
156152
self._context = CLEMContext("leica", self._basepath)
157153
return True
158154

@@ -181,9 +177,6 @@ def _find_context(self, file_path: Path) -> bool:
181177
else SPAContext("epu", self._basepath)
182178
)
183179
self.parameters_model = ProcessingParametersSPA
184-
# Assign it the detector attribute if not already present
185-
if not self._role:
186-
self._role = "detector"
187180
return True
188181

189182
# Files starting with "Position" belong to the standard tomography workflow
@@ -198,23 +191,6 @@ def _find_context(self, file_path: Path) -> bool:
198191
logger.info("Acquisition software: tomo")
199192
self._context = TomographyContext("tomo", self._basepath)
200193
self.parameters_model = PreprocessingParametersTomo
201-
# Assign role if not already present
202-
if not self._role:
203-
# Fractions files attributed to the detector
204-
if (
205-
"Fractions" in split_file_name[-1]
206-
or "fractions" in split_file_name[-1]
207-
):
208-
self._role = "detector"
209-
# MDOC files attributed to the microscope
210-
elif (
211-
file_path.suffix == ".mdoc"
212-
or file_path.with_suffix(".mdoc").is_file()
213-
):
214-
self._role = "microscope"
215-
# Attribute all other files to the detector
216-
else:
217-
self._role = "detector"
218194
return True
219195

220196
# Files with these suffixes belong to the serial EM tomography workflow
@@ -239,19 +215,14 @@ def _find_context(self, file_path: Path) -> bool:
239215
return False
240216
self._context = TomographyContext("serialem", self._basepath)
241217
self.parameters_model = PreprocessingParametersTomo
242-
if not self._role:
243-
if "Frames" in file_path.parts:
244-
self._role = "detector"
245-
else:
246-
self._role = "microscope"
247218
return True
248219
return False
249220

250221
def post_transfer(self, transferred_file: Path):
251222
try:
252223
if self._context:
253224
self._context.post_transfer(
254-
transferred_file, role=self._role, environment=self._environment
225+
transferred_file, environment=self._environment
255226
)
256227
except Exception as e:
257228
logger.error(f"An exception was encountered post transfer: {e}")
@@ -309,19 +280,17 @@ def _analyse(self):
309280
self.queue.task_done()
310281
continue
311282
elif self._extension:
312-
logger.info(f"Context found successfully: {self._role}")
283+
logger.info(
284+
f"Context found successfully for {transferred_file}"
285+
)
313286
try:
314287
self._context.post_first_transfer(
315288
transferred_file,
316-
role=self._role,
317289
environment=self._environment,
318290
)
319291
except Exception as e:
320292
logger.error(f"Exception encountered: {e}")
321-
if (
322-
self._role == "detector"
323-
and "atlas" not in transferred_file.parts
324-
):
293+
if "atlas" not in transferred_file.parts:
325294
if not dc_metadata:
326295
try:
327296
dc_metadata = self._context.gather_metadata(
@@ -377,20 +346,16 @@ def _analyse(self):
377346
self._find_extension(transferred_file)
378347
if self._extension:
379348
logger.info(
380-
f"Context found successfully: {self._role}, {transferred_file}"
349+
f"Extension found successfully for {transferred_file}"
381350
)
382351
try:
383352
self._context.post_first_transfer(
384353
transferred_file,
385-
role=self._role,
386354
environment=self._environment,
387355
)
388356
except Exception as e:
389357
logger.error(f"Exception encountered: {e}")
390-
if (
391-
self._role == "detector"
392-
and "atlas" not in transferred_file.parts
393-
):
358+
if "atlas" not in transferred_file.parts:
394359
if not dc_metadata:
395360
try:
396361
dc_metadata = self._context.gather_metadata(
@@ -443,8 +408,8 @@ def _xml_file(self, data_file: Path) -> Path:
443408
if not self._environment:
444409
return data_file.with_suffix(".xml")
445410
file_name = f"{'_'.join(p for p in data_file.stem.split('_')[:-1])}.xml"
446-
data_directories = self._murfey_config.get("data_directories", {})
447-
for dd in data_directories.keys():
411+
data_directories = self._murfey_config.get("data_directories", [])
412+
for dd in data_directories:
448413
if str(data_file).startswith(dd):
449414
base_dir = Path(dd)
450415
mid_dir = data_file.relative_to(dd).parent

0 commit comments

Comments (0)