Skip to content

Commit 24d6138

Browse files
committed
fix for dset values with multidimensional datasets and compound types
1 parent 768070d commit 24d6138

File tree

2 files changed

+41
-35
lines changed

2 files changed

+41
-35
lines changed

h5json/hdf5db.py

Lines changed: 32 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -2071,9 +2071,12 @@ def createRegionReference(self, item):
20712071
Convert a list to a tuple, recursively.
20722072
Example. [[1,2],[3,4]] -> ((1,2),(3,4))
20732073
"""
2074-
def toTuple(self, data):
2074+
def toTuple(self, rank, data):
20752075
if type(data) in (list, tuple):
2076-
return tuple(self.toTuple(x) for x in data)
2076+
if rank > 0:
2077+
return list(self.toTuple(rank-1, x) for x in data)
2078+
else:
2079+
return tuple(self.toTuple(rank-1, x) for x in data)
20772080
else:
20782081
return data
20792082

@@ -2421,25 +2424,6 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"):
24212424
msg = "Only JSON is supported for for this data type"
24222425
self.log.info(msg)
24232426
raise IOError(errno.EINVAL, msg)
2424-
2425-
2426-
# need some special conversion for compound types --
2427-
# each element must be a tuple, but the JSON decoder
2428-
# gives us a list instead.
2429-
if format != "binary" and len(dset.dtype) > 1 and type(data) in (list, tuple):
2430-
converted_data = []
2431-
for i in range(len(data)):
2432-
converted_data.append(self.toTuple(data[i]))
2433-
data = converted_data
2434-
else:
2435-
h5t_check = h5py.check_dtype(ref=dset.dtype)
2436-
if h5t_check in (h5py.Reference, h5py.RegionReference):
2437-
# convert data to data refs
2438-
if format == "binary":
2439-
msg = "Only JSON is supported for for this data type"
2440-
self.log.info(msg)
2441-
raise IOError(errno.EINVAL, msg)
2442-
data = self.listToRef(data)
24432427

24442428
if slices is None:
24452429
slices = []
@@ -2487,6 +2471,25 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"):
24872471
np_shape = tuple(np_shape) # for comparison with ndarray shape
24882472

24892473
self.log.info("selection shape:" + str(np_shape))
2474+
2475+
2476+
# need some special conversion for compound types --
2477+
# each element must be a tuple, but the JSON decoder
2478+
# gives us a list instead.
2479+
if format != "binary" and len(dset.dtype) > 1 and type(data) in (list, tuple):
2480+
data = self.toTuple(rank, data)
2481+
#for i in range(len(data)):
2482+
# converted_data.append(self.toTuple(data[i]))
2483+
#data = converted_data
2484+
else:
2485+
h5t_check = h5py.check_dtype(ref=dset.dtype)
2486+
if h5t_check in (h5py.Reference, h5py.RegionReference):
2487+
# convert data to data refs
2488+
if format == "binary":
2489+
msg = "Only JSON is supported for for this data type"
2490+
self.log.info(msg)
2491+
raise IOError(errno.EINVAL, msg)
2492+
data = self.listToRef(data)
24902493

24912494
if format == "binary":
24922495
if npoints*itemSize != len(data):
@@ -2577,17 +2580,17 @@ def setDatasetValuesByPointSelection(self, obj_uuid, data, points, format="json"
25772580
msg = "Only JSON is supported for for this data type"
25782581
self.log.info(msg)
25792582
raise IOError(errno.EINVAL, msg)
2580-
2583+
2584+
rank = len(dset.shape)
2585+
25812586
# need some special conversion for compound types --
25822587
# each element must be a tuple, but the JSON decoder
25832588
# gives us a list instead.
25842589
if format == "json" and len(dset.dtype) > 1 and type(data) in (list, tuple):
2585-
converted_data = []
2586-
for i in range(len(data)):
2587-
converted_data.append(self.toTuple(data[i]))
2588-
data = converted_data
2589-
2590-
rank = len(dset.shape)
2590+
converted_data = self.toTuple(rank, data)
2591+
#for i in range(len(data)):
2592+
# converted_data.append(self.toTuple(data[i]))
2593+
#data = converted_data
25912594

25922595
if format == "json":
25932596

@@ -2741,7 +2744,7 @@ def createDataset(self, datatype, datashape, max_shape=None,
27412744
ndscalar = np.zeros((), dtype=dt)
27422745
for i in range(len(fillvalue)):
27432746
field = dt.names[i]
2744-
ndscalar[field] = self.toTuple(fillvalue[i])
2747+
ndscalar[field] = self.toTuple(0, fillvalue[i])
27452748
fillvalue = ndscalar
27462749

27472750
if fillvalue:

test/unit/hdf5dbTest.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1005,13 +1005,16 @@ def testToRef(self):
10051005

10061006
def testToTuple(self):
10071007
filepath = getFile('empty.h5', 'totuple.h5')
1008+
data1d = [1,2,3]
1009+
data2d = [[1,2],[3,4]]
1010+
data3d = [[[1,2],[3,4]], [[5,6],[7,8]]]
10081011
with Hdf5db(filepath, app_logger=self.log) as db:
1009-
self.assertEqual(db.toTuple( [1,2,3] ), (1,2,3) )
1010-
self.assertEqual(db.toTuple( [[1,2],[3,4]] ), ((1,2),(3,4)) )
1011-
self.assertEqual(db.toTuple( ([1,2],[3,4]) ), ((1,2),(3,4)) )
1012-
self.assertEqual(db.toTuple( [(1,2),(3,4)] ), ((1,2),(3,4)) )
1013-
self.assertEqual(db.toTuple( [[[1,2],[3,4]], [[5,6],[7,8]]] ),
1014-
(((1,2),(3,4)), ((5,6),(7,8))) )
1012+
self.assertEqual(db.toTuple(1, data1d ), [1,2,3] )
1013+
self.assertEqual(db.toTuple(2, data2d ), [[1,2],[3,4]] )
1014+
self.assertEqual(db.toTuple(1, data2d ), [(1,2),(3,4)] )
1015+
self.assertEqual(db.toTuple(3, data3d), [[[1,2],[3,4]], [[5,6],[7,8]]] )
1016+
self.assertEqual(db.toTuple(2, data3d), [[(1,2),(3,4)], [(5,6),(7,8)]] )
1017+
self.assertEqual(db.toTuple(1, data3d), [((1,2),(3,4)), ((5,6),(7,8))] )
10151018

10161019

10171020
def testBytesArrayToList(self):

0 commit comments

Comments (0)