Skip to content

Commit 74d3a62

Browse files
committed
update dataset values in init
1 parent 3d9003c commit 74d3a62

File tree

2 files changed

+67
-7
lines changed

2 files changed

+67
-7
lines changed

src/h5json/hsdsstore/hsds_writer.py

Lines changed: 27 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -436,14 +436,31 @@ def updateValues(self, dset_ids):
436436
if getCollectionForId(dset_id) != "datasets":
437437
continue # ignore groups and datatypes
438438
dset_json = self.db.getObjectById(dset_id)
439-
if "updates" not in dset_json:
439+
dset_shape = dset_json["shape"]
440+
dset_class = dset_shape['class']
441+
if dset_class == "H5S_NULL":
442+
# no data to update
440443
continue
441-
updates = dset_json["updates"]
442-
if updates:
443-
self.log.debug(f"hsds_writer> {dset_id} update count: {len(updates)}")
444-
for (sel, arr) in updates:
445-
self.updateValue(dset_id, sel, arr)
446-
updates.clear()
444+
if self._init:
445+
# get all data for the dataset
446+
# TBD: do this by chunks
447+
if dset_class == "H5S_SCALAR":
448+
dset_dims = []
449+
else:
450+
dset_dims = dset_shape["dims"]
451+
sel_all = selections.select(dset_dims, ...)
452+
arr = self.db.getDatasetValues(dset_id, sel_all)
453+
if arr is not None:
454+
self.updateValue(dset_id, sel_all, arr)
455+
else:
456+
if "updates" not in dset_json:
457+
continue
458+
updates = dset_json["updates"]
459+
if updates:
460+
self.log.debug(f"hsds_writer> {dset_id} update count: {len(updates)}")
461+
for (sel, arr) in updates:
462+
self.updateValue(dset_id, sel, arr)
463+
updates.clear()
447464

448465
def flush(self):
449466
""" Write dirty items """
@@ -472,6 +489,9 @@ def flush(self):
472489
self.createObjects(obj_ids)
473490
dirty_ids.update(obj_ids)
474491
dirty_ids.add(root_id) # add back root for attribute and link creation
492+
if not self._no_data:
493+
# initialize dataset values
494+
self.updateValues(obj_ids)
475495
self._init = False
476496
elif self.db.new_objects:
477497
self.log.debug(f"hsds_writer> {len(self.db.new_objects)} objects to create")

test/unit/hsds_writer_test.py

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -199,6 +199,46 @@ def testH5PyToHS(self):
199199
self.assertEqual(g1_json["attributeCount"], 0)
200200
self.assertEqual(g1_json["linkCount"], 2)
201201

202+
# get the g1.1 link
203+
http_rsp = http_conn.GET(f"/groups/{g1_id}/links/g1.1")
204+
self.assertEqual(http_rsp.status_code, 200)
205+
rsp_json = http_rsp.json()
206+
g1_1_link = rsp_json["link"]
207+
g1_1_id = g1_1_link["id"]
208+
209+
# Get the g1.1 json
210+
http_rsp = http_conn.GET(f"/groups/{g1_1_id}")
211+
self.assertEqual(http_rsp.status_code, 200)
212+
g1_json = http_rsp.json()
213+
self.assertEqual(g1_json["attributeCount"], 0)
214+
self.assertEqual(g1_json["linkCount"], 2)
215+
216+
# get the dset1.1.1 link
217+
http_rsp = http_conn.GET(f"/groups/{g1_1_id}/links/dset1.1.1")
218+
self.assertEqual(http_rsp.status_code, 200)
219+
rsp_json = http_rsp.json()
220+
dset1_1_1_link = rsp_json["link"]
221+
dset1_1_1_id = dset1_1_1_link["id"]
222+
223+
# get the dset1.1.1 json
224+
http_rsp = http_conn.GET(f"/datasets/{dset1_1_1_id}")
225+
self.assertEqual(http_rsp.status_code, 200)
226+
dset1_1_1_json = http_rsp.json()
227+
dset1_1_1_shape = dset1_1_1_json["shape"]
228+
self.assertEqual(dset1_1_1_shape["class"], "H5S_SIMPLE")
229+
230+
# get the dset1_1_1 data
231+
http_rsp = http_conn.GET(f"/datasets/{dset1_1_1_id}/value")
232+
self.assertEqual(http_rsp.status_code, 200)
233+
rsp_json = http_rsp.json()
234+
dset1_1_1_value = rsp_json["value"]
235+
self.assertEqual(len(dset1_1_1_value), 10)
236+
for i in range(10):
237+
row = dset1_1_1_value[i]
238+
self.assertEqual(len(row), 10)
239+
for j in range(10):
240+
self.assertEqual(row[j], i * j)
241+
202242
db.close()
203243

204244

0 commit comments

Comments
 (0)