Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions scripts/export_lower_resolution.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import zarr

from flamingo_tools.s3_utils import get_s3_path, BUCKET_NAME, SERVICE_ENDPOINT
# from skimage.segmentation import relabel_sequential
from skimage.segmentation import relabel_sequential


def filter_component(fs, segmentation, cochlea, seg_name, components):
Expand All @@ -22,7 +22,8 @@ def filter_component(fs, segmentation, cochlea, seg_name, components):
filter_mask = ~np.isin(segmentation, keep_label_ids)
segmentation[filter_mask] = 0

# segmentation, _, _ = relabel_sequential(segmentation)
segmentation, _, _ = relabel_sequential(segmentation)
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think that this is the culprit for why IHC IDs and synapse IDs don't match. We still need to be careful here, though: if the max ID exceeds 65,535, then we run out of uint16 IDs, which may happen for SGNs and is why I included this.

Maybe the best option is to remove the sequential relabeling here, and to raise a warning if we have values larger than the uint16 max ID, as this won't lead to a big issue — just some ID clashes.

segmentation = segmentation.astype("uint16")
return segmentation


Expand All @@ -41,7 +42,7 @@ def export_lower_resolution(args):
s3_store, fs = get_s3_path(internal_path, bucket_name=BUCKET_NAME, service_endpoint=SERVICE_ENDPOINT)
with zarr.open(s3_store, mode="r") as f:
data = f[input_key][:]
print(data.shape)

if args.filter_by_components is not None:
data = filter_component(fs, data, args.cochlea, channel, args.filter_by_components)
if args.binarize:
Expand Down
9 changes: 7 additions & 2 deletions scripts/export_synapse_detections.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from tqdm import tqdm


# TODO
def export_synapse_detections(cochlea, scale, output_folder, synapse_name, reference_ihcs, max_dist, radius):
s3 = create_s3_target()

Expand Down Expand Up @@ -53,14 +54,18 @@ def export_synapse_detections(cochlea, scale, output_folder, synapse_name, refer
coordinates /= (2 ** scale)
coordinates = np.round(coordinates, 0).astype("int")

ihc_ids = syn_table["matched_ihc"].values

# Create the output.
output = np.zeros(shape, dtype="uint16")
mask = ball(radius).astype(bool)

for coord in tqdm(coordinates, desc="Writing synapses to volume"):
for coord, matched_ihc in tqdm(
zip(coordinates, ihc_ids), total=len(coordinates), desc="Writing synapses to volume"
):
bb = tuple(slice(c - radius, c + radius + 1) for c in coord)
try:
output[bb][mask] = 1
output[bb][mask] = matched_ihc
except IndexError:
print("Index error for", coord)
continue
Expand Down
Loading