Commit fd9e3d2
[wk-libs] Dataset: fix bug with upload linking, add remove_layer (#584)
* cancel previous CI runs on same branch
* add cluster_tools to Readme
* dataset.upload: fix bug with linking, added dataset.remove_layer
* update changelog
* undo remove_layer
* update changelog
* minor fix
1 parent e48adbc commit fd9e3d2

File tree: 6 files changed, +33 −7 lines changed

.github/workflows/ci.yml

Lines changed: 4 additions & 0 deletions
```diff
@@ -2,6 +2,10 @@ name: CI
 
 on: push
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   changes:
     runs-on: ubuntu-latest
```

README.md

Lines changed: 3 additions & 0 deletions
```diff
@@ -25,3 +25,6 @@ Use this for:
 - converting Tiff-stacks and other data formats for volume image data to webKnossos-compatible *.wkw files from the CLI
 - up/downsampling of *.wkw files to different magnification levels (image pyramid) from the CLI
 - compressing your *.wkw files to save disk space from the CLI
+
+## [Cluster Tools](cluster_tools)
+The `cluster_tools` package provides Python `Executor` classes for distributing tasks on a slurm cluster or via multiprocessing.
```
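The new README section only names the package; as a rough illustration, here is a minimal sketch of how such an `Executor` might be used. The `get_executor` factory and its arguments are assumptions, not taken from this diff; the executors are expected to mirror the `concurrent.futures.Executor` interface.

```python
import cluster_tools

def square(n: int) -> int:
    return n * n

if __name__ == "__main__":
    # Assumed API: the same code runs locally via multiprocessing or on a
    # slurm cluster by swapping the backend name ("multiprocessing" / "slurm").
    with cluster_tools.get_executor("multiprocessing", max_workers=4) as executor:
        results = list(executor.map(square, range(10)))
    print(results)  # [0, 1, 4, 9, ...]
```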

webknossos/Changelog.md

Lines changed: 1 addition & 0 deletions
```diff
@@ -16,6 +16,7 @@ For upgrade instructions, please check the respective *Breaking Changes* section
 ### Changed
 
 ### Fixed
+- `dataset.upload(layers_to_link=…)`: Fixed a bug where the upload did not complete if `layers_to_link` contained layers present in the uploading dataset. [#584](https://github.com/scalableminds/webknossos-libs/pull/584)
 
 
 ## [0.9.2](https://github.com/scalableminds/webknossos-libs/releases/tag/v0.9.2) - 2022-02-03
```
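To illustrate the code path this entry refers to, a hedged sketch of an upload that links a layer also present locally. The diff below only shows that `LayerToLink` objects carry `layer_name` and `new_layer_name` attributes; the import path and other field names here are assumptions.

```python
import webknossos as wk

# Hypothetical local dataset that itself contains a "color" layer:
ds = wk.Dataset.open("l4_sample")

ds.upload(
    new_dataset_name="l4_sample_with_linked_color",
    layers_to_link=[
        # Link "color" from an already-uploaded dataset instead of re-uploading
        # it. Before this fix, the upload did not complete whenever the local
        # dataset also contained a layer with the linked name.
        wk.LayerToLink(
            dataset_name="l4_sample_original",  # assumed field
            layer_name="color",
        )
    ],
)
```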

webknossos/examples/learned_segmenter.py

Lines changed: 0 additions & 2 deletions
```diff
@@ -52,10 +52,8 @@ def main() -> None:
     )  # wk data has dimensions (Channels, X, Y, Z)
     # move channels to last dimension, remove z dimension to match skimage's shape
     X_train = np.moveaxis(np.squeeze(img_data_train), 0, -1)
-    print(volume_annotation.mags[mag].bounding_box)
     Y_train = np.squeeze(volume_annotation.mags[mag].read())
 
-    print(training_data_bbox, X_train.shape, Y_train.shape)
     segmenter.fit(X_train, Y_train)
 
     # Step 4: Use our trained model and predict a class for each pixel in the dataset
```

webknossos/webknossos/client/_upload_dataset.py

Lines changed: 16 additions & 1 deletion
```diff
@@ -1,6 +1,7 @@
 import os
 from functools import lru_cache
 from pathlib import Path
+from tempfile import TemporaryDirectory
 from time import gmtime, strftime
 from typing import Dict, Iterator, List, NamedTuple, Optional, Tuple
 from uuid import uuid4
@@ -58,7 +59,8 @@ def _cached_get_upload_datastore(context: _WebknossosContext) -> str:
 
 
 def _walk(
-    path: Path, base_path: Optional[Path] = None
+    path: Path,
+    base_path: Optional[Path] = None,
 ) -> Iterator[Tuple[Path, Path, int]]:
     if base_path is None:
         base_path = path
@@ -80,6 +82,19 @@ def upload_dataset(
     if layers_to_link is None:
         layers_to_link = []
     context = _get_context()
+    layer_names_to_link = set(i.new_layer_name or i.layer_name for i in layers_to_link)
+    if len(layer_names_to_link.intersection(dataset.layers.keys())) > 0:
+        with TemporaryDirectory() as tmpdir:
+            tmp_ds = dataset.shallow_copy_dataset(
+                tmpdir, name=dataset.name, layers_to_ignore=layer_names_to_link
+            )
+            return upload_dataset(
+                tmp_ds,
+                new_dataset_name=new_dataset_name,
+                layers_to_link=layers_to_link,
+                jobs=jobs,
+            )
+
     file_infos = list(_walk(dataset.path))
     total_file_size = sum(size for _, _, size in file_infos)
     # replicates https://github.com/scalableminds/webknossos/blob/master/frontend/javascripts/admin/dataset/dataset_upload_view.js
```
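The fix detects a name collision between local layers and to-be-linked layers, then uploads a temporary shallow copy that omits the colliding layers; the recursive `upload_dataset` call terminates because the copy's layer set no longer intersects `layer_names_to_link`. A standalone sketch of that collision check, using stand-in tuples instead of real `LayerToLink` objects:

```python
# Stand-in data: each link is (layer_name, new_layer_name-or-None).
local_layers = {"color", "segmentation"}
links = [("color", None), ("masks", "segmentation")]

# A link occupies new_layer_name if set, otherwise its original layer_name.
layer_names_to_link = {new or orig for orig, new in links}  # {"color", "segmentation"}

if layer_names_to_link & local_layers:
    # Collision: upload a shallow copy without these layers and let the server
    # link them; the recursive call then sees an empty intersection.
    print("would shallow-copy, ignoring:", layer_names_to_link & local_layers)
```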

webknossos/webknossos/dataset/dataset.py

Lines changed: 9 additions & 4 deletions
```diff
@@ -8,12 +8,12 @@
 from os import PathLike, makedirs
 from os.path import basename, join, normpath
 from pathlib import Path
-from shutil import rmtree
 from typing import (
     TYPE_CHECKING,
     Any,
     ContextManager,
     Dict,
+    Iterable,
     List,
     Optional,
     Tuple,
@@ -559,12 +559,17 @@ def delete_layer(self, layer_name: str) -> None:
             raise IndexError(
                 f"Removing layer {layer_name} failed. There is no layer with this name"
             )
+        layer_path = self._layers[layer_name].path
         del self._layers[layer_name]
         self._properties.data_layers = [
             layer for layer in self._properties.data_layers if layer.name != layer_name
         ]
         # delete files on disk
-        rmtree(join(self.path, layer_name))
+        if layer_path.is_symlink():
+            layer_path.unlink()
+        else:
+            # rmtree does not recurse into linked dirs, but removes the link
+            shutil.rmtree(layer_path)
         self._export_as_json()
 
     def add_symlink_layer(
```
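This change matters for layers added via `add_symlink_layer`: `shutil.rmtree` raises an `OSError` when handed a symlink to a directory, and unlinking is also what keeps the link target intact. A minimal sketch of the pattern in isolation:

```python
import shutil
from pathlib import Path

def remove_layer_dir(layer_path: Path) -> None:
    # shutil.rmtree refuses symlinks ("Cannot call rmtree on a symbolic link"),
    # and a linked layer should only lose the link, never the linked-to data.
    if layer_path.is_symlink():
        layer_path.unlink()        # removes just the link
    else:
        shutil.rmtree(layer_path)  # removes the real layer directory
```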
```diff
@@ -714,10 +719,10 @@ def copy_dataset(
 
     def shallow_copy_dataset(
         self,
-        new_dataset_path: Path,
+        new_dataset_path: Union[str, PathLike],
         name: Optional[str] = None,
         make_relative: bool = False,
-        layers_to_ignore: Optional[List[str]] = None,
+        layers_to_ignore: Optional[Iterable[str]] = None,
     ) -> "Dataset":
         """
         Create a new dataset at the given path. Link all mags of all existing layers.
```
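The widened signature is exactly what the upload fix above relies on: `_upload_dataset.py` passes a `str` from `TemporaryDirectory` and a `set` of layer names. A hedged usage sketch, assuming `ds` is an open `Dataset` with a "color" layer:

```python
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmpdir:  # a plain str now type-checks as the target path
    tmp_ds = ds.shallow_copy_dataset(
        tmpdir,
        name=ds.name,
        layers_to_ignore={"color"},  # any Iterable[str], not only List[str]
    )
    # tmp_ds links all mags of the remaining layers instead of copying data.
```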
