Skip to content

Commit f553399

Browse files
committed
fix hdfdict
1 parent 86cd132 commit f553399

File tree

1 file changed

+8
-7
lines changed

1 file changed

+8
-7
lines changed

src/pynxtools/dataconverter/hdfdict.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
 
 import h5py
 import yaml
-from numpy import str_
 
 TYPE = "_type_"
 

@@ -45,7 +44,8 @@ def unpack_dataset(item):
 
     """
    value = item[()]
-    type_id = item.attrs.get(TYPE, str_()).astype(str)
+    type_id = item.attrs.get(TYPE, "")
+
    if type_id == "datetime":
        if hasattr(value, "__iter__"):
            value = [datetime.fromtimestamp(ts) for ts in value]
@@ -62,7 +62,7 @@ def unpack_dataset(item):
        value = tuple(value)
 
    elif type_id == "str":
-        value = str_(value).astype(str)
+        value = value.decode("utf-8") if isinstance(value, bytes) else value
 
    return value
 

@@ -181,15 +181,16 @@ def pack_dataset(hdfobject, key, value):
            attr_data = None
 
        if attr_data:
-            ds.attrs.create(name=TYPE, data=str_(attr_data))
+            ds.attrs.create(name=TYPE, data=attr_data.encode("utf-8"))
 
    except (TypeError, ValueError):
        # Obviously the data was not serializable. To give it
        # a last try; serialize it to yaml
        # and save it to the hdf file:
-        ds = hdfobject.create_dataset(name=key, data=str_(yaml.safe_dump(value)))
-
-        ds.attrs.create(name=TYPE, data=str_("yaml"))
+        ds = hdfobject.create_dataset(
+            name=key, data=yaml.safe_dump(value).encode("utf-8")
+        )
+        ds.attrs.create(name=TYPE, data=b"yaml")
        # if this fails again, restructure your data!
 
 

0 commit comments

Comments
 (0)