Skip to content

Commit 537ca97

Browse files
Merge branch 'master' into dev
2 parents 1a6dc5e + 3ec97ab commit 537ca97

File tree

3 files changed

+16
-2
lines changed

3 files changed

+16
-2
lines changed

RELEASE_OVERVIEW.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,10 @@
11
# Release Overview
22

3+
**New in version 1.5.0**
4+
- Preliminary version of object classification tool.
5+
- Enable support for napari v6, zarr v3 and numpy v2.
6+
- Add support for training models for automatic instance segmentation only.
7+
38
**New in version 1.4.0**
49

510
This release includes three main changes:

micro_sam/instance_segmentation.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -805,6 +805,7 @@ def get_unetr(
805805
use_skip_connection=False,
806806
resize_input=True,
807807
use_conv_transpose=use_conv_transpose,
808+
808809
)
809810

810811
if decoder_state is not None:

micro_sam/training/training.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -664,8 +664,9 @@ def default_sam_dataset(
664664
if raw_transform is None:
665665
raw_transform = require_8bit
666666

667+
# Prepare the label transform.
667668
if with_segmentation_decoder:
668-
label_transform = torch_em.transform.label.PerObjectDistanceTransform(
669+
default_label_transform = torch_em.transform.label.PerObjectDistanceTransform(
669670
distances=True,
670671
boundary_distances=True,
671672
directed_distances=False,
@@ -674,7 +675,14 @@ def default_sam_dataset(
674675
min_size=min_size,
675676
)
676677
else:
677-
label_transform = torch_em.transform.label.MinSizeLabelTransform(min_size=min_size)
678+
default_label_transform = torch_em.transform.label.MinSizeLabelTransform(min_size=min_size)
679+
680+
# Allow combining label transforms.
681+
custom_label_transform = kwargs.pop("label_transform", None)
682+
if custom_label_transform is None:
683+
label_transform = default_label_transform
684+
else:
685+
label_transform = torch_em.transform.generic.Compose(custom_label_transform, default_label_transform)
678686

679687
# Check the patch shape to add a singleton if required.
680688
patch_shape = _update_patch_shape(

0 commit comments

Comments
 (0)