@@ -13,30 +13,11 @@
 from larray.core.metadata import Metadata
 from larray.util.misc import LHDFStore
 from larray.inout.session import register_file_handler
-from larray.inout.common import FileHandler, _supported_larray_types, _supported_scalars_types
+from larray.inout.common import FileHandler, _supported_typenames, _supported_scalars_types
 from larray.inout.pandas import df_asarray
 from larray.example import get_example_filepath
 
 
-_hdf_supported_types = _supported_larray_types + _supported_scalars_types
-
-
-class ScalarHDF(object):
-    def __init__(self, value):
-        _type = type(value).__name__
-        if not isinstance(value, _supported_scalars_types):
-            raise TypeError("Type {} which is not currently supported by the HDF5 format".format(_type))
-        self.value = value
-        self._type = _type
-
-    def to_hdf(self, filepath, key):
-        key = _translate_group_key_hdf(key)
-        s = pd.Series(data=self.value)
-        with LHDFStore(filepath) as store:
-            store.put(key, s)
-            store.get_storer(key).attrs.type = self._type
-
-
 # for backward compatibility (larray < 0.29) but any object read from an hdf file should have
 # an attribute 'type'
 def _get_type_from_attrs(attrs):
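
Note: the newly imported _supported_typenames is a set of class *names* (strings), whereas the removed _hdf_supported_types was a tuple of classes. That is what allows the 'type' string stored in a file's attrs to be tested with a plain membership check, both in read_hdf and in _read_item below. A minimal sketch of how these helpers in larray/inout/common.py presumably fit together (their actual definitions are not part of this diff, so treat the exact composition as an assumption):

# sketch, assuming larray/inout/common.py defines something along these lines
from larray.core.array import Array
from larray.core.axis import Axis
from larray.core.group import Group

_supported_larray_types = (Axis, Group, Array)              # larray containers
_supported_scalars_types = (int, float, bool, bytes, str)   # plain Python scalars
_supported_types = _supported_larray_types + _supported_scalars_types
# a set of type *names*, directly comparable to the 'type' attribute
# stored alongside each object in an HDF5 file
_supported_typenames = {cls.__name__ for cls in _supported_types}
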
@@ -139,7 +120,7 @@ def read_hdf(filepath_or_buffer, key, fill_value=nan, na=nan, sort_rows=False, s
                 key = np.char.decode(key, 'utf-8')
             axis = read_hdf(filepath_or_buffer, attrs['axis_key'])
             res = LGroup(key=key, name=name, axis=axis)
-        elif _type in {cls.__name__ for cls in _supported_scalars_types}:
+        elif _type in _supported_typenames:
             res = pd_obj.values
             assert len(res) == 1
             res = res[0]
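
This scalar branch relies on the dump side (see _dump_item below) having stored the value as a one-element pandas Series tagged with its type name, so pd_obj.values is a length-1 array whose single element is the original scalar. A standalone illustration of the mechanism with plain pandas (file path and key invented for the example):

import pandas as pd

# dump: wrap the scalar in a one-element Series and tag it with its type name
with pd.HDFStore('/tmp/scalar_demo.h5') as store:
    store.put('answer', pd.Series(data=42))
    store.get_storer('answer').attrs.type = 'int'

# read: check the stored type name, then unwrap the single value
with pd.HDFStore('/tmp/scalar_demo.h5') as store:
    pd_obj = store.get('answer')
    assert store.get_storer('answer').attrs.type == 'int'
    res = pd_obj.values
    assert len(res) == 1
    res = res[0]   # 42 again (as a NumPy integer)
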
@@ -167,26 +148,29 @@ def list_items(self):
         items += [(key.split('/')[-1], 'Group_Backward_Comp') for key in keys if '__groups__' in key]
         return items
 
-    def _read_item(self, key, type, *args, **kwargs):
-        if type in {cls.__name__ for cls in _hdf_supported_types}:
+    def _read_item(self, key, typename, *args, **kwargs):
+        if typename in _supported_typenames:
             hdf_key = '/' + key
         # ---- for backward compatibility (LArray < 0.33) ----
-        elif type == 'Axis_Backward_Comp':
+        elif typename == 'Axis_Backward_Comp':
             hdf_key = '__axes__/' + key
-        elif type == 'Group_Backward_Comp':
+        elif typename == 'Group_Backward_Comp':
             hdf_key = '__groups__/' + key
         else:
             raise TypeError()
         return read_hdf(self.handle, hdf_key, *args, **kwargs)
 
     def _dump_item(self, key, value, *args, **kwargs):
-        if isinstance(value, _supported_scalars_types):
-            value = ScalarHDF(value)
-        elif isinstance(value, Group):
-            kwargs['axis_key'] = '/' + value.axis.name
-        if hasattr(value, 'to_hdf'):
-            hdf_key = '/' + key
+        hdf_key = '/' + key
+        if isinstance(value, (Array, Axis)):
             value.to_hdf(self.handle, hdf_key, *args, **kwargs)
+        elif isinstance(value, Group):
+            hdf_axis_key = '/' + value.axis.name
+            value.to_hdf(self.handle, hdf_key, hdf_axis_key, *args, **kwargs)
+        elif isinstance(value, _supported_scalars_types):
+            s = pd.Series(data=value)
+            self.handle.put(hdf_key, s)
+            self.handle.get_storer(hdf_key).attrs.type = type(value).__name__
         else:
             raise TypeError()
 
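
After this change, _dump_item computes hdf_key once and dispatches explicitly on the value's type: Array and Axis use their own to_hdf, a Group now receives its axis key as an explicit argument instead of having it injected into kwargs, and scalars are serialized inline, which is what made the ScalarHDF wrapper removed above redundant. A rough usage sketch through Session, which is what ultimately drives these handler methods (names, values and file path invented for the example; metadata handling omitted):

from larray import Axis, Session, ndtest

age = Axis('age=0..9')
ses = Session(arr=ndtest((2, 3)),       # Array  -> first branch
              age=age,                  # Axis   -> first branch
              kids=age[0:5] >> 'kids',  # Group  -> explicit hdf_axis_key branch
              answer=42)                # scalar -> inline pd.Series branch
ses.to_hdf('/tmp/session_demo.h5')      # each item is dispatched through _dump_item

ses2 = Session('/tmp/session_demo.h5')  # items come back through _read_item
assert ses2.answer == 42
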