
Commit 724a47e

PProfizi authored and github-actions[bot] committed
update generated code
1 parent 8c14e36 commit 724a47e

File tree

7 files changed: +86 -4 lines changed

doc/source/_static/dpf_operators.html

Lines changed: 6 additions & 4 deletions
Large diffs are not rendered by default.

src/ansys/dpf/core/operators/result/migrate_to_h5dpf.py

Lines changed: 39 additions & 0 deletions
@@ -25,6 +25,8 @@ class migrate_to_h5dpf(Operator):
 
     Parameters
     ----------
+    h5_chunk_size: int or GenericDataContainer, optional
+        Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension.
     dataset_size_compression_threshold: int or GenericDataContainer, optional
         Integer value that defines the minimum dataset size (in bytes) to use h5 native compression Applicable for arrays of floats, doubles and integers.
     h5_native_compression: int or DataTree or GenericDataContainer, optional
@@ -58,6 +60,8 @@ class migrate_to_h5dpf(Operator):
     >>> op = dpf.operators.result.migrate_to_h5dpf()
 
     >>> # Make input connections
+    >>> my_h5_chunk_size = int()
+    >>> op.inputs.h5_chunk_size.connect(my_h5_chunk_size)
     >>> my_dataset_size_compression_threshold = int()
     >>> op.inputs.dataset_size_compression_threshold.connect(my_dataset_size_compression_threshold)
     >>> my_h5_native_compression = int()
@@ -81,6 +85,7 @@ class migrate_to_h5dpf(Operator):
 
     >>> # Instantiate operator and connect inputs in one line
     >>> op = dpf.operators.result.migrate_to_h5dpf(
+    ...     h5_chunk_size=my_h5_chunk_size,
     ...     dataset_size_compression_threshold=my_dataset_size_compression_threshold,
     ...     h5_native_compression=my_h5_native_compression,
     ...     export_floats=my_export_floats,
@@ -99,6 +104,7 @@ class migrate_to_h5dpf(Operator):
 
     def __init__(
         self,
+        h5_chunk_size=None,
         dataset_size_compression_threshold=None,
         h5_native_compression=None,
         export_floats=None,
@@ -115,6 +121,8 @@ def __init__(
         super().__init__(name="hdf5::h5dpf::migrate_file", config=config, server=server)
         self._inputs = InputsMigrateToH5Dpf(self)
         self._outputs = OutputsMigrateToH5Dpf(self)
+        if h5_chunk_size is not None:
+            self.inputs.h5_chunk_size.connect(h5_chunk_size)
         if dataset_size_compression_threshold is not None:
             self.inputs.dataset_size_compression_threshold.connect(
                 dataset_size_compression_threshold
@@ -151,6 +159,12 @@ def _spec() -> Specification:
         spec = Specification(
             description=description,
             map_input_pin_spec={
+                -7: PinSpecification(
+                    name="h5_chunk_size",
+                    type_names=["int32", "generic_data_container"],
+                    optional=True,
+                    document=r"""Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension.""",
+                ),
                 -5: PinSpecification(
                     name="dataset_size_compression_threshold",
                     type_names=["int32", "generic_data_container"],
@@ -279,6 +293,8 @@ class InputsMigrateToH5Dpf(_Inputs):
     --------
     >>> from ansys.dpf import core as dpf
     >>> op = dpf.operators.result.migrate_to_h5dpf()
+    >>> my_h5_chunk_size = int()
+    >>> op.inputs.h5_chunk_size.connect(my_h5_chunk_size)
     >>> my_dataset_size_compression_threshold = int()
     >>> op.inputs.dataset_size_compression_threshold.connect(my_dataset_size_compression_threshold)
     >>> my_h5_native_compression = int()
@@ -303,6 +319,8 @@ class InputsMigrateToH5Dpf(_Inputs):
 
     def __init__(self, op: Operator):
         super().__init__(migrate_to_h5dpf._spec().inputs, op)
+        self._h5_chunk_size = Input(migrate_to_h5dpf._spec().input_pin(-7), -7, op, -1)
+        self._inputs.append(self._h5_chunk_size)
         self._dataset_size_compression_threshold = Input(
             migrate_to_h5dpf._spec().input_pin(-5), -5, op, -1
         )
@@ -336,6 +354,27 @@ def __init__(self, op: Operator):
         )
         self._inputs.append(self._filtering_workflow)
 
+    @property
+    def h5_chunk_size(self) -> Input:
+        r"""Allows to connect h5_chunk_size input to the operator.
+
+        Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension.
+
+        Returns
+        -------
+        input:
+            An Input instance for this pin.
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.result.migrate_to_h5dpf()
+        >>> op.inputs.h5_chunk_size.connect(my_h5_chunk_size)
+        >>> # or
+        >>> op.inputs.h5_chunk_size(my_h5_chunk_size)
+        """
+        return self._h5_chunk_size
+
     @property
     def dataset_size_compression_threshold(self) -> Input:
         r"""Allows to connect dataset_size_compression_threshold input to the operator.

src/ansys/dpf/core/operators/serialization/hdf5dpf_generate_result_file.py

Lines changed: 41 additions & 0 deletions
@@ -21,6 +21,8 @@ class hdf5dpf_generate_result_file(Operator):
 
     Parameters
     ----------
+    h5_chunk_size: int, optional
+        Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension.
     append_mode: bool, optional
         Experimental: Allow appending chunked data to the file. This disables fields container content deduplication.
     dataset_size_compression_threshold: int, optional
@@ -55,6 +57,8 @@ class hdf5dpf_generate_result_file(Operator):
     >>> op = dpf.operators.serialization.hdf5dpf_generate_result_file()
 
     >>> # Make input connections
+    >>> my_h5_chunk_size = int()
+    >>> op.inputs.h5_chunk_size.connect(my_h5_chunk_size)
     >>> my_append_mode = bool()
     >>> op.inputs.append_mode.connect(my_append_mode)
     >>> my_dataset_size_compression_threshold = int()
@@ -78,6 +82,7 @@ class hdf5dpf_generate_result_file(Operator):
 
     >>> # Instantiate operator and connect inputs in one line
     >>> op = dpf.operators.serialization.hdf5dpf_generate_result_file(
+    ...     h5_chunk_size=my_h5_chunk_size,
     ...     append_mode=my_append_mode,
     ...     dataset_size_compression_threshold=my_dataset_size_compression_threshold,
     ...     h5_native_compression=my_h5_native_compression,
@@ -96,6 +101,7 @@ class hdf5dpf_generate_result_file(Operator):
 
     def __init__(
         self,
+        h5_chunk_size=None,
         append_mode=None,
         dataset_size_compression_threshold=None,
         h5_native_compression=None,
@@ -114,6 +120,8 @@ def __init__(
         )
         self._inputs = InputsHdf5DpfGenerateResultFile(self)
         self._outputs = OutputsHdf5DpfGenerateResultFile(self)
+        if h5_chunk_size is not None:
+            self.inputs.h5_chunk_size.connect(h5_chunk_size)
         if append_mode is not None:
             self.inputs.append_mode.connect(append_mode)
         if dataset_size_compression_threshold is not None:
@@ -144,6 +152,12 @@ def _spec() -> Specification:
         spec = Specification(
             description=description,
             map_input_pin_spec={
+                -7: PinSpecification(
+                    name="h5_chunk_size",
+                    type_names=["int32"],
+                    optional=True,
+                    document=r"""Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension.""",
+                ),
                 -6: PinSpecification(
                     name="append_mode",
                     type_names=["bool"],
@@ -270,6 +284,8 @@ class InputsHdf5DpfGenerateResultFile(_Inputs):
     --------
     >>> from ansys.dpf import core as dpf
     >>> op = dpf.operators.serialization.hdf5dpf_generate_result_file()
+    >>> my_h5_chunk_size = int()
+    >>> op.inputs.h5_chunk_size.connect(my_h5_chunk_size)
     >>> my_append_mode = bool()
     >>> op.inputs.append_mode.connect(my_append_mode)
     >>> my_dataset_size_compression_threshold = int()
@@ -294,6 +310,10 @@ class InputsHdf5DpfGenerateResultFile(_Inputs):
 
     def __init__(self, op: Operator):
        super().__init__(hdf5dpf_generate_result_file._spec().inputs, op)
+        self._h5_chunk_size = Input(
+            hdf5dpf_generate_result_file._spec().input_pin(-7), -7, op, -1
+        )
+        self._inputs.append(self._h5_chunk_size)
         self._append_mode = Input(
             hdf5dpf_generate_result_file._spec().input_pin(-6), -6, op, -1
         )
@@ -335,6 +355,27 @@ def __init__(self, op: Operator):
         )
         self._inputs.append(self._input_name2)
 
+    @property
+    def h5_chunk_size(self) -> Input:
+        r"""Allows to connect h5_chunk_size input to the operator.
+
+        Size of each HDF5 chunk in kilobytes (KB). Default: 1 MB when compression is enabled; for uncompressed datasets, the default is the full dataset size x dimension.
+
+        Returns
+        -------
+        input:
+            An Input instance for this pin.
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.serialization.hdf5dpf_generate_result_file()
+        >>> op.inputs.h5_chunk_size.connect(my_h5_chunk_size)
+        >>> # or
+        >>> op.inputs.h5_chunk_size(my_h5_chunk_size)
+        """
+        return self._h5_chunk_size
+
     @property
     def append_mode(self) -> Input:
         r"""Allows to connect append_mode input to the operator.
4 binary files changed (0 Bytes, 0 Bytes, 191 KB, 34.4 KB): binary files not shown.
