Skip to content

Commit 66074e6

Browse files
committed
Follow-up: address review comments from gdmenten (15/01/2020)
1 parent 100b682 commit 66074e6

File tree

3 files changed

+21
-38
lines changed

3 files changed

+21
-38
lines changed

larray/inout/common.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
# only for HDF5 and pickle formats
1717
# support list, tuple and dict?
1818
_supported_scalars_types = (int, float, bool, basestring, date, time, datetime)
19+
_supported_types = _supported_larray_types + _supported_scalars_types
20+
_supported_typenames = {cls.__name__ for cls in _supported_types}
1921

2022

2123
def _get_index_col(nb_axes=None, index_col=None, wide=True):

larray/inout/hdf.py

Lines changed: 15 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -13,30 +13,11 @@
1313
from larray.core.metadata import Metadata
1414
from larray.util.misc import LHDFStore
1515
from larray.inout.session import register_file_handler
16-
from larray.inout.common import FileHandler, _supported_larray_types, _supported_scalars_types
16+
from larray.inout.common import FileHandler, _supported_typenames, _supported_scalars_types
1717
from larray.inout.pandas import df_asarray
1818
from larray.example import get_example_filepath
1919

2020

21-
_hdf_supported_types = _supported_larray_types + _supported_scalars_types
22-
23-
24-
class ScalarHDF(object):
25-
def __init__(self, value):
26-
_type = type(value).__name__
27-
if not isinstance(value, _supported_scalars_types):
28-
raise TypeError("Type {} which is not currently supported by the HDF5 format".format(_type))
29-
self.value = value
30-
self._type = _type
31-
32-
def to_hdf(self, filepath, key):
33-
key = _translate_group_key_hdf(key)
34-
s = pd.Series(data=self.value)
35-
with LHDFStore(filepath) as store:
36-
store.put(key, s)
37-
store.get_storer(key).attrs.type = self._type
38-
39-
4021
# for backward compatibility (larray < 0.29) but any object read from an hdf file should have
4122
# an attribute 'type'
4223
def _get_type_from_attrs(attrs):
@@ -139,7 +120,7 @@ def read_hdf(filepath_or_buffer, key, fill_value=nan, na=nan, sort_rows=False, s
139120
key = np.char.decode(key, 'utf-8')
140121
axis = read_hdf(filepath_or_buffer, attrs['axis_key'])
141122
res = LGroup(key=key, name=name, axis=axis)
142-
elif _type in {cls.__name__ for cls in _supported_scalars_types}:
123+
elif _type in _supported_typenames:
143124
res = pd_obj.values
144125
assert len(res) == 1
145126
res = res[0]
@@ -167,26 +148,29 @@ def list_items(self):
167148
items += [(key.split('/')[-1], 'Group_Backward_Comp') for key in keys if '__groups__' in key]
168149
return items
169150

170-
def _read_item(self, key, type, *args, **kwargs):
171-
if type in {cls.__name__ for cls in _hdf_supported_types}:
151+
def _read_item(self, key, typename, *args, **kwargs):
152+
if typename in _supported_typenames:
172153
hdf_key = '/' + key
173154
# ---- for backward compatibility (LArray < 0.33) ----
174-
elif type == 'Axis_Backward_Comp':
155+
elif typename == 'Axis_Backward_Comp':
175156
hdf_key = '__axes__/' + key
176-
elif type == 'Group_Backward_Comp':
157+
elif typename == 'Group_Backward_Comp':
177158
hdf_key = '__groups__/' + key
178159
else:
179160
raise TypeError()
180161
return read_hdf(self.handle, hdf_key, *args, **kwargs)
181162

182163
def _dump_item(self, key, value, *args, **kwargs):
183-
if isinstance(value, _supported_scalars_types):
184-
value = ScalarHDF(value)
185-
elif isinstance(value, Group):
186-
kwargs['axis_key'] = '/' + value.axis.name
187-
if hasattr(value, 'to_hdf'):
188-
hdf_key = '/' + key
164+
hdf_key = '/' + key
165+
if isinstance(value, (Array, Axis)):
189166
value.to_hdf(self.handle, hdf_key, *args, **kwargs)
167+
elif isinstance(value, Group):
168+
hdf_axis_key = '/' + value.axis.name
169+
value.to_hdf(self.handle, hdf_key, hdf_axis_key, *args, **kwargs)
170+
elif isinstance(value, _supported_scalars_types):
171+
s = pd.Series(data=value)
172+
self.handle.put(hdf_key, s)
173+
self.handle.get_storer(hdf_key).attrs.type = type(value).__name__
190174
else:
191175
raise TypeError()
192176

larray/inout/pickle.py

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,7 @@
99
from larray.core.metadata import Metadata
1010
from larray.util.compat import pickle
1111
from larray.inout.session import register_file_handler
12-
from larray.inout.common import FileHandler, _supported_larray_types, _supported_scalars_types
13-
14-
15-
_pickle_supported_types = _supported_larray_types + _supported_scalars_types
12+
from larray.inout.common import FileHandler, _supported_types, _supported_typenames, _supported_scalars_types
1613

1714

1815
@register_file_handler('pickle', ['pkl', 'pickle'])
@@ -39,14 +36,14 @@ def list_items(self):
3936
items += [(key, 'Array') for key, value in self.data.items() if isinstance(value, Array)]
4037
return items
4138

42-
def _read_item(self, key, type, *args, **kwargs):
43-
if type in {cls.__name__ for cls in _pickle_supported_types}:
39+
def _read_item(self, key, typename, *args, **kwargs):
40+
if typename in _supported_typenames:
4441
return self.data[key]
4542
else:
4643
raise TypeError()
4744

4845
def _dump_item(self, key, value, *args, **kwargs):
49-
if isinstance(value, _pickle_supported_types):
46+
if isinstance(value, _supported_types):
5047
self.data[key] = value
5148
else:
5249
raise TypeError()

0 commit comments

Comments
 (0)