Skip to content

Commit 6f599ce

Browse files
PProfizi authored and github-actions[bot] committed
update generated code
1 parent 4ba6321 commit 6f599ce

File tree

6 files changed

+335
-8
lines changed

6 files changed

+335
-8
lines changed

doc/source/_static/dpf_operators.html

Lines changed: 17 additions & 6 deletions
Large diffs are not rendered by default.

src/ansys/dpf/core/operators/result/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -283,6 +283,7 @@
283283
from .reaction_force_Z import reaction_force_Z
284284
from .read_cms_rbd_file import read_cms_rbd_file
285285
from .recombine_harmonic_indeces_cyclic import recombine_harmonic_indeces_cyclic
286+
from .record_reader import record_reader
286287
from .remove_rigid_body_motion import remove_rigid_body_motion
287288
from .remove_rigid_body_motion_fc import remove_rigid_body_motion_fc
288289
from .result_provider import result_provider
Lines changed: 313 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,313 @@
1+
"""
2+
record_reader
3+
4+
Autogenerated DPF operator classes.
5+
"""
6+
7+
from __future__ import annotations
8+
from typing import TYPE_CHECKING
9+
10+
from warnings import warn
11+
from ansys.dpf.core.dpf_operator import Operator
12+
from ansys.dpf.core.inputs import Input, _Inputs
13+
from ansys.dpf.core.outputs import Output, _Outputs
14+
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
15+
from ansys.dpf.core.operators.specification import PinSpecification, Specification
16+
from ansys.dpf.core.config import Config
17+
from ansys.dpf.core.server_types import AnyServerType
18+
19+
if TYPE_CHECKING:
20+
from ansys.dpf.core.data_sources import DataSources
21+
from ansys.dpf.core.streams_container import StreamsContainer
22+
23+
24+
class record_reader(Operator):
    r"""Extracts a record from a file.


    Inputs
    ------
    streams: StreamsContainer, optional
    data_sources: DataSources
    record_name:
        Name of the record that must be extracted from the file.

        For example to read the nodal solution of the 4th set, input should be **RST::DSI::SET4::NSL**

        The MAPDL records tree can be found in the following links:

        - [Ansys Help - Retrieving Data from the Results File](https://ansyshelp.ansys.com/public/account/secured?returnurl=//////Views/Secured/corp/v252/en/ans_prog/datafromRST.html)

        - [Ansys Help - Description of the Results File](https://ansyshelp.ansys.com/public/account/secured?returnurl=/Views/Secured/corp/v252/en/ans_prog/Hlp_P_INT1_2.html)

    Outputs
    -------
    field: PropertyField or Field
        Output is of type property_field for integer records and of type field for double records.

    Examples
    --------
    >>> from ansys.dpf import core as dpf

    >>> # Instantiate operator
    >>> op = dpf.operators.result.record_reader()

    >>> # Make input connections
    >>> my_streams = dpf.StreamsContainer()
    >>> op.inputs.streams.connect(my_streams)
    >>> my_data_sources = dpf.DataSources()
    >>> op.inputs.data_sources.connect(my_data_sources)
    >>> my_record_name = "RST::DSI::SET4::NSL"
    >>> op.inputs.record_name.connect(my_record_name)

    >>> # Instantiate operator and connect inputs in one line
    >>> op = dpf.operators.result.record_reader(
    ...     streams=my_streams,
    ...     data_sources=my_data_sources,
    ...     record_name=my_record_name,
    ... )

    >>> # Get output data
    >>> result_field = op.outputs.field()
    """

    def __init__(
        self,
        streams=None,
        data_sources=None,
        record_name=None,
        config=None,
        server=None,
    ):
        super().__init__(
            name="record_reader",
            config=config,
            server=server,
            inputs_type=InputsRecordReader,
            outputs_type=OutputsRecordReader,
        )
        # Only connect the pins the caller actually provided; the rest stay
        # unconnected (pin 3 is optional, the others must be connected before run).
        if streams is not None:
            self.inputs.streams.connect(streams)
        if data_sources is not None:
            self.inputs.data_sources.connect(data_sources)
        if record_name is not None:
            self.inputs.record_name.connect(record_name)

    @staticmethod
    def _spec() -> Specification:
        """Describe the operator's input/output pin layout for the server."""
        description = r"""Extracts a record from a file.
"""
        spec = Specification(
            description=description,
            map_input_pin_spec={
                3: PinSpecification(
                    name="streams",
                    type_names=["streams_container"],
                    optional=True,
                    document=r"""""",
                ),
                4: PinSpecification(
                    name="data_sources",
                    type_names=["data_sources"],
                    optional=False,
                    document=r"""""",
                ),
                60: PinSpecification(
                    name="record_name",
                    type_names=["any"],
                    optional=False,
                    document=r"""Name of the record that must be extracted from the file.

For example to read the nodal solution of the 4th set, input should be **RST::DSI::SET4::NSL**

The MAPDL records tree can be found in the following links:

- [Ansys Help - Retrieving Data from the Results File](https://ansyshelp.ansys.com/public/account/secured?returnurl=//////Views/Secured/corp/v252/en/ans_prog/datafromRST.html)

- [Ansys Help - Description of the Results File](https://ansyshelp.ansys.com/public/account/secured?returnurl=/Views/Secured/corp/v252/en/ans_prog/Hlp_P_INT1_2.html)""",
                ),
            },
            map_output_pin_spec={
                0: PinSpecification(
                    name="field",
                    type_names=["property_field", "field"],
                    optional=False,
                    document=r"""Output is of type property_field for integer records and of type field for double records.""",
                ),
            },
        )
        return spec

    @staticmethod
    def default_config(server: AnyServerType = None) -> Config:
        """Returns the default config of the operator.

        This config can then be changed to the user needs and be used to
        instantiate the operator. The Configuration allows to customize
        how the operation will be processed by the operator.

        Parameters
        ----------
        server:
            Server with channel connected to the remote or local instance. When
            ``None``, attempts to use the global server.

        Returns
        -------
        config:
            A new Config instance equivalent to the default config for this operator.
        """
        return Operator.default_config(name="record_reader", server=server)

    @property
    def inputs(self) -> InputsRecordReader:
        """Enables to connect inputs to the operator

        Returns
        --------
        inputs:
            An instance of InputsRecordReader.
        """
        return self._inputs

    @property
    def outputs(self) -> OutputsRecordReader:
        """Enables to get outputs of the operator by evaluating it

        Returns
        --------
        outputs:
            An instance of OutputsRecordReader.
        """
        return self._outputs
183+
184+
185+
class InputsRecordReader(_Inputs):
    """Intermediate class used to connect user inputs to
    record_reader operator.

    Examples
    --------
    >>> from ansys.dpf import core as dpf
    >>> op = dpf.operators.result.record_reader()
    >>> my_streams = dpf.StreamsContainer()
    >>> op.inputs.streams.connect(my_streams)
    >>> my_data_sources = dpf.DataSources()
    >>> op.inputs.data_sources.connect(my_data_sources)
    >>> my_record_name = "RST::DSI::SET4::NSL"
    >>> op.inputs.record_name.connect(my_record_name)
    """

    def __init__(self, op: Operator):
        super().__init__(record_reader._spec().inputs, op)
        # One Input wrapper per declared input pin (3, 4 and 60).
        self._streams: Input[StreamsContainer] = Input(
            record_reader._spec().input_pin(3), 3, op, -1
        )
        self._inputs.append(self._streams)
        self._data_sources: Input[DataSources] = Input(
            record_reader._spec().input_pin(4), 4, op, -1
        )
        self._inputs.append(self._data_sources)
        self._record_name: Input = Input(
            record_reader._spec().input_pin(60), 60, op, -1
        )
        self._inputs.append(self._record_name)

    @property
    def streams(self) -> Input[StreamsContainer]:
        r"""Allows to connect streams input to the operator.

        Returns
        -------
        input:
            An Input instance for this pin.

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.record_reader()
        >>> op.inputs.streams.connect(my_streams)
        >>> # or
        >>> op.inputs.streams(my_streams)
        """
        return self._streams

    @property
    def data_sources(self) -> Input[DataSources]:
        r"""Allows to connect data_sources input to the operator.

        Returns
        -------
        input:
            An Input instance for this pin.

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.record_reader()
        >>> op.inputs.data_sources.connect(my_data_sources)
        >>> # or
        >>> op.inputs.data_sources(my_data_sources)
        """
        return self._data_sources

    @property
    def record_name(self) -> Input:
        r"""Allows to connect record_name input to the operator.

        Name of the record that must be extracted from the file.

        For example to read the nodal solution of the 4th set, input should be **RST::DSI::SET4::NSL**

        The MAPDL records tree can be found in the following links:

        - [Ansys Help - Retrieving Data from the Results File](https://ansyshelp.ansys.com/public/account/secured?returnurl=//////Views/Secured/corp/v252/en/ans_prog/datafromRST.html)

        - [Ansys Help - Description of the Results File](https://ansyshelp.ansys.com/public/account/secured?returnurl=/Views/Secured/corp/v252/en/ans_prog/Hlp_P_INT1_2.html)

        Returns
        -------
        input:
            An Input instance for this pin.

        Examples
        --------
        >>> from ansys.dpf import core as dpf
        >>> op = dpf.operators.result.record_reader()
        >>> op.inputs.record_name.connect(my_record_name)
        >>> # or
        >>> op.inputs.record_name(my_record_name)
        """
        return self._record_name
282+
283+
284+
class OutputsRecordReader(_Outputs):
    """Intermediate class used to get outputs from
    record_reader operator.

    Examples
    --------
    >>> from ansys.dpf import core as dpf
    >>> op = dpf.operators.result.record_reader()
    >>> # Connect inputs : op.inputs. ...
    >>> result_field = op.outputs.field()
    """

    def __init__(self, op: Operator):
        super().__init__(record_reader._spec().outputs, op)
        # Pin 0 may carry either a property_field (integer records) or a
        # field (double records); expose one typed Output accessor for each
        # possibility, registered in the same order as declared here.
        for attr_name, type_name in (
            ("field_as_property_field", "property_field"),
            ("field_as_field", "field"),
        ):
            typed_output = Output(
                _modify_output_spec_with_one_type(
                    record_reader._spec().output_pin(0), type_name
                ),
                0,
                op,
            )
            setattr(self, attr_name, typed_output)
            self._outputs.append(typed_output)

src/ansys/dpf/core/operators/serialization/field_to_csv.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,8 @@
2121

2222

2323
class field_to_csv(Operator):
24-
r"""Exports a field or a fields container into a csv file
24+
r"""Exports a field or a fields container into a csv file. Currently only
25+
homogenous Fields Definition of Fields Container are supported.
2526
2627
2728
Inputs
@@ -83,7 +84,8 @@ def __init__(
8384

8485
@staticmethod
8586
def _spec() -> Specification:
86-
description = r"""Exports a field or a fields container into a csv file
87+
description = r"""Exports a field or a fields container into a csv file. Currently only
88+
homogenous Fields Definition of Fields Container are supported.
8789
"""
8890
spec = Specification(
8991
description=description,
0 Bytes
Binary file not shown.
0 Bytes
Binary file not shown.

0 commit comments

Comments
 (0)