-from typing import List
+from typing import List, Optional


+def get_datasource_path(dataset_path):
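+    # Single helper so that all readers and writers agree on the JSON file location.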
+    return path.join(dataset_path, "datasource-properties.json")
+
+
def create_parser():
    parser = ArgumentParser()

    parser.add_argument("path", help="Directory containing the dataset.")

-    parser.add_argument("--name", "-n", help="Name of the dataset")
-
+    parser.add_argument("--name", "-n", help="Name of the dataset", default=None)
    parser.add_argument(
        "--scale",
        "-s",
        help="Scale of the dataset (e.g. 11.2,11.2,25)",
        default="1,1,1",
    )

+    parser.add_argument(
+        "--refresh",
+        "-r",
+        default=False,
+        action="store_true",
+        help="Update the datasource-properties.json of an existing dataset instead of creating it.",
+    )
+
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "--compute_max_id",
@@ -39,6 +44,23 @@ def create_parser():
    return parser


+def write_datasource_properties(dataset_path, datasource_properties):
+    datasource_properties_path = get_datasource_path(dataset_path)
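+    # "wt" truncates any existing file, so the properties are rewritten completely.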
+    with open(datasource_properties_path, "wt") as datasource_properties_file:
+        json.dump(datasource_properties, datasource_properties_file, indent=2)
+
+
+def read_datasource_properties(dataset_path):
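+    # Read counterpart to write_datasource_properties; parses the JSON file into a dict.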
+    with open(get_datasource_path(dataset_path), "r") as datasource_properties_file:
+        return json.load(datasource_properties_file)
+
+
58+ """
59+ Creates a datasource-properties.json file with the specified properties
60+ for the given dataset path. Common layers are detected automatically.
61+ """
62+
63+
def write_webknossos_metadata(
    dataset_path,
    name,
@@ -50,28 +72,71 @@ def write_webknossos_metadata(

    # Generate a metadata file for webKnossos
    # Currently includes no source of information for team
-    datasource_properties_path = path.join(dataset_path, "datasource-properties.json")
    layers = list(
        detect_layers(dataset_path, max_id, compute_max_id, exact_bounding_box)
    )
-    with open(datasource_properties_path, "wt") as datasource_properties_json:
-        json.dump(
-            {
-                "id": {"name": name, "team": "<unknown>"},
-                "dataLayers": layers,
-                "scale": scale,
-            },
-            datasource_properties_json,
-            indent=2,
+    write_datasource_properties(
+        dataset_path,
+        {
+            "id": {"name": name, "team": "<unknown>"},
+            "dataLayers": layers,
+            "scale": scale,
+        },
+    )
+
+
88+ """
89+ Updates the datasource-properties.json file for a given dataset.
90+ Use this method if you added (or removed) layers and/or changed magnifications for
91+ existing layers.
92+
93+ Raises an exception if the datasource-properties.json file does not exist, yet.
94+ In this case, use write_webknossos_metadata instead.
95+ """
+
+
+def refresh_metadata(
+    wkw_path, max_id=0, compute_max_id=False, exact_bounding_box: Optional[dict] = None
+):
+    datasource_properties_path = get_datasource_path(wkw_path)
+    if not path.exists(datasource_properties_path):
+        raise Exception(
+            "datasource-properties.json file could not be found. Please use write_webknossos_metadata to create it."
        )

+    datasource_properties = read_datasource_properties(wkw_path)
+    existing_layers_dict = {
+        layer["name"]: layer for layer in datasource_properties["dataLayers"]
+    }
+
+    new_layers = list(
+        detect_layers(wkw_path, max_id, compute_max_id, exact_bounding_box)
+    )
+
+    # Merge the freshly detected layers with the existing layer information so that
+    # information, such as bounding boxes, is not lost for existing layers.
+    # For existing layers, only the resolutions are updated.
+    merged_layers = []
+    for new_layer in new_layers:
+        layer_name = new_layer["name"]
+        if layer_name in existing_layers_dict:
+            existing_layer = existing_layers_dict[layer_name]
+            # Update the resolutions
+            existing_layer["wkwResolutions"] = new_layer["wkwResolutions"]
+            merged_layers.append(existing_layer)
+        else:
+            merged_layers.append(new_layer)
+
+    datasource_properties["dataLayers"] = merged_layers
+    write_datasource_properties(wkw_path, datasource_properties)
+

def read_metadata_for_layer(wkw_path, layer_name):
-    datasource_properties = json.load(
-        open(path.join(wkw_path, "datasource-properties.json"), "r")
-    )
+    datasource_properties = read_datasource_properties(wkw_path)
+
    layers = datasource_properties["dataLayers"]
    layer_info = next(layer for layer in layers if layer["name"] == layer_name)
+
    dtype = np.dtype(layer_info["elementClass"])
    bounding_box = layer_info["boundingBox"]
    origin = bounding_box["topLeft"]
@@ -146,13 +211,15 @@ def detect_resolutions(dataset_path, layer) -> List[Mag]:
def detect_standard_layer(dataset_path, layer_name, exact_bounding_box=None):
    # Perform metadata detection for well-known layers

+    mags = list(detect_resolutions(dataset_path, layer_name))
+    mags = sorted(mags)
+    assert len(mags) > 0, "No resolutions found"
+
    if exact_bounding_box is None:
-        bbox = detect_bbox(dataset_path, layer_name)
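+        # mags is sorted ascending, so mags[0] is the finest magnification found on disk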
+        bbox = detect_bbox(dataset_path, layer_name, mags[0])
    else:
        bbox = exact_bounding_box

-    mags = list(detect_resolutions(dataset_path, layer_name))
-    mags = sorted(mags)
    resolutions = [
        {
            "resolution": mag.to_array(),
@@ -161,7 +228,6 @@ def detect_standard_layer(dataset_path, layer_name, exact_bounding_box=None):
        for mag in mags
    ]

-    assert len(mags) > 0, "No resolutions found"
    dtype = detect_dtype(dataset_path, layer_name, mags[0])

    return {
@@ -205,9 +271,10 @@ def detect_segmentation_layer(


def detect_layers(dataset_path, max_id, compute_max_id, exact_bounding_box=None):
-    # Detect metadata for well-known layers, e.g. color and segmentation
-    if path.exists(path.join(dataset_path, "color")):
-        yield detect_standard_layer(dataset_path, "color", exact_bounding_box)
+    # Detect metadata for well-known layers (i.e., color, prediction and segmentation)
+    for layer_name in ["color", "prediction"]:
+        if path.exists(path.join(dataset_path, layer_name)):
+            yield detect_standard_layer(dataset_path, layer_name, exact_bounding_box)
    if path.exists(path.join(dataset_path, "segmentation")):
        yield detect_segmentation_layer(
            dataset_path, "segmentation", max_id, compute_max_id, exact_bounding_box
@@ -217,7 +284,17 @@ def detect_layers(dataset_path, max_id, compute_max_id, exact_bounding_box=None)
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    args = create_parser().parse_args()
-    scale = tuple(float(x) for x in args.scale.split(","))
-    write_webknossos_metadata(
-        args.path, args.name, scale, args.max_id, args.compute_max_id
-    )
+    if not args.refresh:
+        assert (
+            args.name is not None
+        ), "Please provide a name via --name to create metadata."
+        scale = tuple(float(x) for x in args.scale.split(","))
+        write_webknossos_metadata(
+            args.path, args.name, scale, args.max_id, args.compute_max_id
+        )
+    else:
+        if args.name is not None:
+            logging.warning(
+                "The --name argument is ignored, since --refresh was provided."
+            )
+        refresh_metadata(args.path, args.max_id, args.compute_max_id)
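
A minimal usage sketch of the two entry points (the dataset path, name, and scale below are made-up examples, not taken from this change):

    # First run: create datasource-properties.json from scratch.
    write_webknossos_metadata(
        "/data/sample_dataset", "sample_dataset", (11.24, 11.24, 25), 0, False
    )

    # After adding layers or magnifications: update the existing file in place,
    # keeping existing information such as bounding boxes.
    refresh_metadata("/data/sample_dataset")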