@@ -135,7 +135,7 @@ def get_driver(self, img_path):
             return "zarr"
         elif ".n5" in img_path:
             return "n5"
-        elif is_neuroglancer_precomputed(img_path):
+        elif is_precomputed(img_path):
             return "neuroglancer_precomputed"
         else:
             raise ValueError(f"Unsupported image format: {img_path}")
@@ -274,7 +274,7 @@ def get_storage_driver(img_path):
         raise ValueError(f"Unsupported path type: {img_path}")


-def is_neuroglancer_precomputed(img_path):
+def is_precomputed(img_path):
     """
     Checks if the path points to a Neuroglancer precomputed dataset.

@@ -288,19 +288,22 @@ def is_neuroglancer_precomputed(img_path):
     bool
         True if the path appears to be a Neuroglancer precomputed dataset.
     """
-    info_path = os.path.join(img_path, "info")
     try:
-        spec = {
-            "driver": "file",
-            "kvstore": {
-                "driver": "file",
-                "path": info_path
-            }
-        }
-        info_store = ts.open(spec, open=True).result()
-        info_json = info_store.read().result().decode("utf-8")
-        info = json.loads(info_json)
-        return all(k in info for k in ["data_type", "scales", "type"])
+        # Build kvstore spec
+        bucket_name, path = util.parse_cloud_path(img_path)
+        kv = {"driver": "gcs", "bucket": bucket_name, "path": path}
+
+        # Open the info file
+        store = ts.KvStore.open(kv).result()
+        raw = store.read(b"info").result()
+
+        # Only proceed if the key exists and has content
+        if raw.state != "missing" and raw.value:
+            info = json.loads(raw.value.decode("utf8"))
+            is_valid_type = info.get("type") in ("image", "segmentation")
+            if isinstance(info, dict) and is_valid_type and "scales" in info:
+                return True
+        return False
     except Exception:
         return False

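A minimal usage sketch of the change, assuming `is_precomputed` comes from the module edited above, the dataset lives on GCS, and `util.parse_cloud_path` splits a `gs://bucket/key` URI into `(bucket, path)`; the bucket and dataset names below are hypothetical.

```python
import tensorstore as ts

# Hypothetical GCS location of a Neuroglancer precomputed dataset.
img_path = "gs://my-bucket/brain_sample/precomputed"

# is_precomputed() reads the "info" key from the bucket and checks that it
# describes an "image" or "segmentation" volume with a "scales" pyramid.
if is_precomputed(img_path):
    # get_driver() would then resolve this path to "neuroglancer_precomputed",
    # which could be opened through TensorStore, e.g.:
    spec = {
        "driver": "neuroglancer_precomputed",
        "kvstore": {
            "driver": "gcs",
            "bucket": "my-bucket",
            "path": "brain_sample/precomputed",
        },
    }
    dataset = ts.open(spec, read=True).result()
    print(dataset.domain)
```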