Commit 11977b4

Pretty sure none of this is used
1 parent d28bbf1 commit 11977b4

File tree

5 files changed: +2, -142 lines

  src/murfey/client/contexts/spa.py
  src/murfey/client/contexts/tomo.py
  src/murfey/client/instance_environment.py
  src/murfey/client/multigrid_control.py
  src/murfey/util/models.py

src/murfey/client/contexts/spa.py

Lines changed: 0 additions & 1 deletion
@@ -326,7 +326,6 @@ def __init__(self, acquisition_software: str, basepath: Path):
         super().__init__("SPA", acquisition_software)
         self._basepath = basepath
         self._processing_job_stash: dict = {}
-        self._preprocessing_triggers: dict = {}
         self._foil_holes: Dict[int, List[int]] = {}
 
     def gather_metadata(

src/murfey/client/contexts/tomo.py

Lines changed: 0 additions & 91 deletions
@@ -8,7 +8,6 @@
 
 import requests
 import xmltodict
-from pydantic import BaseModel
 
 import murfey.util.eer
 from murfey.client.context import Context, ProcessingParameter
@@ -17,7 +16,6 @@
     MovieTracker,
     MurfeyID,
     MurfeyInstanceEnvironment,
-    global_env_lock,
 )
 from murfey.util import authorised_requests, capture_post, get_machine_config_client
 from murfey.util.mdoc import get_block, get_global_data, get_num_blocks
@@ -65,15 +63,6 @@ def _construct_tilt_series_name(file_path: Path) -> str:
     return "_".join(split_name[:-5])
 
 
-class ProcessFileIncomplete(BaseModel):
-    dest: Path
-    source: Path
-    image_number: int
-    mc_uuid: int
-    tag: str
-    description: str = ""
-
-
 class TomographyContext(Context):
     user_params = [
         ProcessingParameter(
@@ -101,7 +90,6 @@ def __init__(self, acquisition_software: str, basepath: Path):
         self._aligned_tilt_series: List[str] = []
         self._data_collection_stash: list = []
         self._processing_job_stash: dict = {}
-        self._preprocessing_triggers: dict = {}
         self._lock: RLock = RLock()
 
     def _flush_data_collections(self):
@@ -120,12 +108,6 @@ def _flush_data_collections(self):
             capture_post(dc_data[0], json=data)
         self._data_collection_stash = []
 
-    def _flush_processing_job(self, tag: str):
-        if proc_data := self._processing_job_stash.get(tag):
-            for pd in proc_data:
-                requests.post(pd[0], json=pd[1])
-            self._processing_job_stash.pop(tag)
-
     def _flush_processing_jobs(self):
         logger.info(
             f"Flushing {len(self._processing_job_stash.keys())} processing job API calls"
@@ -135,75 +117,6 @@ def _flush_processing_jobs(self):
                 requests.post(pd[0], json=pd[1])
         self._processing_job_stash = {}
 
-    def _flush_preprocess(self, tag: str, app_id: int):
-        if tag_tr := self._preprocessing_triggers.get(tag):
-            for tr in tag_tr:
-                process_file = self._complete_process_file(tr[1], tr[2], app_id)
-                if process_file:
-                    capture_post(tr[0], json=process_file)
-            self._preprocessing_triggers.pop(tag)
-
-    def _complete_process_file(
-        self,
-        incomplete_process_file: ProcessFileIncomplete,
-        environment: MurfeyInstanceEnvironment,
-        app_id: int,
-    ) -> dict:
-        try:
-            with global_env_lock:
-                tag = incomplete_process_file.tag
-
-                eer_fractionation_file = None
-                if environment.data_collection_parameters.get("num_eer_frames"):
-                    response = requests.post(
-                        f"{str(environment.url.geturl())}/visits/{environment.visit}/{environment.murfey_session}/eer_fractionation_file",
-                        json={
-                            "num_frames": environment.data_collection_parameters[
-                                "num_eer_frames"
-                            ],
-                            "fractionation": environment.data_collection_parameters[
-                                "eer_fractionation"
-                            ],
-                            "dose_per_frame": environment.data_collection_parameters[
-                                "dose_per_frame"
-                            ],
-                            "fractionation_file_name": "eer_fractionation_tomo.txt",
-                        },
-                    )
-                    eer_fractionation_file = response.json()["eer_fractionation_file"]
-
-                new_dict = {
-                    "path": str(incomplete_process_file.dest),
-                    "description": incomplete_process_file.description,
-                    "size": incomplete_process_file.source.stat().st_size,
-                    "timestamp": incomplete_process_file.source.stat().st_ctime,
-                    "processing_job": environment.processing_job_ids[tag][
-                        "em-tomo-preprocess"
-                    ],
-                    "data_collection_id": environment.data_collection_ids[tag],
-                    "image_number": incomplete_process_file.image_number,
-                    "pixel_size": environment.data_collection_parameters[
-                        "pixel_size_on_image"
-                    ],
-                    "autoproc_program_id": app_id,
-                    "mc_uuid": incomplete_process_file.mc_uuid,
-                    "dose_per_frame": environment.data_collection_parameters.get(
-                        "dose_per_frame"
-                    ),
-                    "mc_binning": environment.data_collection_parameters.get(
-                        "motion_corr_binning", 1
-                    ),
-                    "gain_ref": environment.data_collection_parameters.get("gain_ref"),
-                    "voltage": environment.data_collection_parameters.get(
-                        "voltage", 300
-                    ),
-                    "eer_fractionation_file": eer_fractionation_file,
-                }
-                return new_dict
-        except KeyError:
-            logger.warning("Key error encountered in _complete_process_file")
-            return {}
-
     def _file_transferred_to(
         self, environment: MurfeyInstanceEnvironment, source: Path, file_path: Path
     ):
@@ -441,14 +354,10 @@ def _add_tilt(
         preproc_data = {
             "path": str(file_transferred_to),
             "description": "",
-            "data_collection_id": environment.data_collection_ids.get(tilt_series),
             "image_number": environment.movies[file_transferred_to].movie_number,
             "pixel_size": environment.data_collection_parameters.get(
                 "pixel_size_on_image", 0
             ),
-            "autoproc_program_id": environment.autoproc_program_ids.get(
-                tilt_series, {}
-            ).get("em-tomo-preprocess"),
             "dose_per_frame": environment.data_collection_parameters.get(
                 "dose_per_frame", 0
             ),
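
The commit message only says these helpers are believed to be unused. One way to sanity-check that claim before merging is to search the source tree for the deleted names; the sketch below is illustrative and not part of the commit (the "src" search root and the name list are assumptions taken from this diff):

    # Hypothetical dead-code check: count remaining references to the names
    # deleted from tomo.py. The "src" root is an assumption about the layout;
    # the names are copied from the diff above.
    from pathlib import Path

    removed_names = [
        "ProcessFileIncomplete",
        "_flush_processing_job(",  # trailing "(" avoids matching _flush_processing_jobs
        "_flush_preprocess",
        "_complete_process_file",
        "_preprocessing_triggers",
    ]

    for name in removed_names:
        hits = [
            f"{path}:{lineno}"
            for path in Path("src").rglob("*.py")
            for lineno, line in enumerate(path.read_text().splitlines(), start=1)
            if name in line
        ]
        # After this commit, each name should report zero references.
        print(f"{name}: {len(hits)} reference(s)", *hits)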

src/murfey/client/instance_environment.py

Lines changed: 2 additions & 44 deletions
@@ -5,10 +5,10 @@
 from itertools import count
 from pathlib import Path
 from threading import RLock
-from typing import Callable, Dict, List, NamedTuple, Optional, Set
+from typing import Dict, List, NamedTuple, Optional
 from urllib.parse import ParseResult
 
-from pydantic import BaseModel, validator
+from pydantic import BaseModel
 
 from murfey.client.watchdir import DirWatcher
 
@@ -42,18 +42,8 @@ class MurfeyInstanceEnvironment(BaseModel):
     watchers: Dict[Path, DirWatcher] = {}
     demo: bool = False
     data_collection_group_ids: Dict[str, int] = {}
-    data_collection_ids: Dict[str, int] = {}
-    processing_job_ids: Dict[str, Dict[str, int]] = {}
-    autoproc_program_ids: Dict[str, Dict[str, int]] = {}
-    id_tag_registry: Dict[str, List[str]] = {
-        "data_collection_group": [],
-        "data_collection": [],
-        "processing_job": [],
-        "auto_proc_program": [],
-    }
     data_collection_parameters: dict = {}
     movies: Dict[Path, MovieTracker] = {}
-    listeners: Dict[str, Set[Callable]] = {}
     movie_tilt_pair: Dict[Path, str] = {}
     tilt_angles: Dict[str, List[List[str]]] = {}
     movie_counters: Dict[str, itertools.count] = {}
@@ -68,47 +58,15 @@ class Config:
         validate_assignment: bool = True
         arbitrary_types_allowed: bool = True
 
-    @validator("data_collection_group_ids")
-    def dcg_callback(cls, v, values):
-        with global_env_lock:
-            for l in values.get("listeners", {}).get("data_collection_group_ids", []):
-                for k in v.keys():
-                    if k not in values["id_tag_registry"]["data_collection"]:
-                        l(k)
-        return v
-
-    @validator("data_collection_ids")
-    def dc_callback(cls, v, values):
-        with global_env_lock:
-            for l in values.get("listeners", {}).get("data_collection_ids", []):
-                for k in v.keys():
-                    if k not in values["id_tag_registry"]["processing_job"]:
-                        l(k)
-        return v
-
-    @validator("autoproc_program_ids")
-    def app_callback(cls, v, values):
-        # logger.info(f"autoproc program ids validator: {v}")
-        with global_env_lock:
-            for l in values.get("listeners", {}).get("autoproc_program_ids", []):
-                for k in v.keys():
-                    if v[k].get("em-tomo-preprocess"):
-                        l(k, v[k]["em-tomo-preprocess"])
-        return v
-
     def clear(self):
         self.sources = []
         self.default_destinations = {}
         for w in self.watchers.values():
             w.stop()
         self.watchers = {}
         self.data_collection_group_ids = {}
-        self.data_collection_ids = {}
-        self.processing_job_ids = {}
-        self.autoproc_program_ids = {}
         self.data_collection_parameters = {}
         self.movies = {}
-        self.listeners = {}
         self.movie_tilt_pair = {}
         self.tilt_angles = {}
         self.visit = ""
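
The deleted validators, together with the removed listeners field, formed a small observer mechanism: because the model sets validate_assignment, reassigning one of the ID dictionaries re-ran its validator, which then called any registered callbacks. Below is a minimal, self-contained sketch of that pattern in pydantic v1 style (matching the validator import removed here); the class and field names are illustrative, not Murfey's API:

    # Sketch of the callback-on-assignment pattern the removed validators used.
    # Assumes pydantic v1 (`validator`, `values`); names are illustrative only.
    from typing import Callable, Dict, Set

    from pydantic import BaseModel, validator


    class Environment(BaseModel):
        listeners: Dict[str, Set[Callable]] = {}
        data_collection_ids: Dict[str, int] = {}

        class Config:
            validate_assignment = True  # re-run validators on attribute assignment

        @validator("data_collection_ids")
        def _notify(cls, v, values):
            # Fan each key of the newly assigned dict out to registered callbacks.
            for callback in values.get("listeners", {}).get("data_collection_ids", set()):
                for tag in v:
                    callback(tag)
            return v


    env = Environment()
    env.listeners = {"data_collection_ids": {lambda tag: print("registered:", tag)}}
    env.data_collection_ids = {"tilt_series_1": 42}  # prints "registered: tilt_series_1"

With listeners, data_collection_ids, processing_job_ids, and autoproc_program_ids all removed from the model, nothing registers or receives these callbacks any more, which is why the validators go with them.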

src/murfey/client/multigrid_control.py

Lines changed: 0 additions & 3 deletions
@@ -370,9 +370,6 @@ def _start_dc(self, json, from_form: bool = False):
 
         source = Path(json["source"])
 
-        self._environment.id_tag_registry["data_collection_group"].append(
-            str(source)
-        )
         url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group"
         dcg_data = {
             "experiment_type": "tomo",

src/murfey/util/models.py

Lines changed: 0 additions & 3 deletions
@@ -312,12 +312,9 @@ class ProcessFile(BaseModel):  # Rename to TomoProcessFile
     path: str
     description: str
     tag: str
-    data_collection_id: Optional[int]
     image_number: int
     pixel_size: float
     dose_per_frame: float
-    processing_job: Optional[int] = None
-    autoproc_program_id: Optional[int] = None
     mc_uuid: Optional[int] = None
     voltage: float = 300
     mc_binning: int = 1
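
Since pydantic ignores unknown fields by default, clients that still send the removed data_collection_id, processing_job, or autoproc_program_id keys will not fail validation against the slimmed-down model; the extra keys are simply dropped. A minimal sketch, with the model trimmed to the fields visible in this diff and example values that are purely illustrative:

    # Assumes pydantic's default "ignore extra fields" behaviour; field values
    # below are made up for illustration.
    from typing import Optional

    from pydantic import BaseModel


    class ProcessFile(BaseModel):
        path: str
        description: str
        tag: str
        image_number: int
        pixel_size: float
        dose_per_frame: float
        mc_uuid: Optional[int] = None
        voltage: float = 300
        mc_binning: int = 1


    payload = {
        "path": "/data/Position_1_001.tiff",
        "description": "",
        "tag": "Position_1",
        "image_number": 1,
        "pixel_size": 1.35,
        "dose_per_frame": 0.5,
        "data_collection_id": 101,   # removed field: ignored, not an error
        "autoproc_program_id": 202,  # removed field: ignored, not an error
    }
    print(ProcessFile(**payload))  # validates; the two extra keys are discarded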
