diff --git a/doc/source/_static/dpf_operators.html b/doc/source/_static/dpf_operators.html
index fd1a4a5fc2d..970ccc6fa9e 100644
--- a/doc/source/_static/dpf_operators.html
+++ b/doc/source/_static/dpf_operators.html
@@ -2099,7 +2099,7 @@
Configuring operators
2 for normalized by the max at a given time step of the first entry or residuals depending on the reference field option,
3 for normalized by the max over all time steps of the first entry or residuals depending on the reference field option" types="int32" optional="true" ellipsis="false" inplace="false">Outputs
Configurations
Scripting
result: add rigid body motion (fields container)
Inputs
Outputs
Configurations
Scripting
utility: merge time freq supports
Inputs
Outputs
Configurations
Scripting
min_max: incremental over fields container
Inputs
Outputs
Configurations
Scripting
scoping: split on property type
Inputs
Outputs
Configurations
Scripting
utility: overlap fields
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal elemental (field)
Inputs
Outputs
Configurations
Scripting
utility: change shell layers
Inputs
Outputs
Configurations
Scripting
utility: merge meshes
Inputs
Outputs
Configurations
Scripting
utility: merge fields
Inputs
Outputs
Configurations
Scripting
utility: merge weighted fields
Inputs
Outputs
Configurations
Scripting
utility: merge fc to fc field matrices
Inputs
Outputs
Configurations
Scripting
filter: high pass (field)
Inputs
Outputs
Configurations
Scripting
min_max: max by component
Inputs
Outputs
Configurations
Scripting
utility: weighted merge fields by label
Inputs
Outputs
Configurations
Scripting
utility: merge fields by label
Inputs
Outputs
Configurations
Scripting
averaging: elemental to elemental nodal (field)
Inputs
Outputs
Configurations
Scripting
utility: merge meshes containers
Inputs
Outputs
Configurations
Scripting
min_max: min max by entity
Inputs
Outputs
Configurations
Scripting
utility: merge collections
Inputs
Outputs
Configurations
Scripting
logic: merge solid and shell fields
Inputs
Outputs
Configurations
Scripting
min_max: min over time
Inputs
Outputs
Configurations
Scripting
geo: element nodal contribution
Inputs
Outputs
Configurations
Scripting
min_max: over label
Inputs
Outputs
Configurations
Scripting
min_max: min by component
Inputs
Outputs
Configurations
Scripting
serialization: serializer to string
Inputs
Outputs
Configurations
Scripting
serialization: deserializer
Inputs
Outputs
Configurations
Scripting
result: cyclic expanded velocity
Inputs
Outputs
Configurations
Scripting
mesh: skin
Inputs
Outputs
Configurations
Scripting
utility: split in for each range
Inputs
Outputs
Configurations
Scripting
utility: incremental field
Inputs
Outputs
Configurations
Scripting
utility: incremental fields container
Inputs
Outputs
Configurations
Scripting
geo: rotate (fields container)
Inputs
Outputs
Configurations
Scripting
utility: incremental property field
Inputs
Outputs
Configurations
Scripting
utility: incremental mesh
Inputs
Outputs
Configurations
Scripting
mesh: points from coordinates
Inputs
Outputs
Configurations
Scripting
utility: incremental concatenate as fields container.
Inputs
Outputs
Configurations
Scripting
utility: make producer consumer for each iterator
Inputs
Outputs
Configurations
Scripting
utility: producer consumer for each
Inputs
Outputs
Configurations
Scripting
averaging: extend to mid nodes (field)
Inputs
Outputs
Configurations
Scripting
invariant: eigen vectors (on fields container)
Inputs
Outputs
Configurations
Scripting
mesh: mesh get attribute
Inputs
Outputs
Configurations
Scripting
metadata: time freq support get attribute
Inputs
Outputs
Configurations
Scripting
utility: set attribute
Inputs
Outputs
Configurations
Scripting
utility: field get attribute
Inputs
Outputs
Configurations
Scripting
min_max: time of min
Inputs
Outputs
Configurations
Scripting
min_max: max over phase
Inputs
Outputs
Configurations
Scripting
min_max: phase of max
Inputs
Outputs
Configurations
Scripting
utility: voigt to standard strains
Inputs
Outputs
Configurations
Scripting
utility: voigt to standard strains (fields container)
Inputs
Outputs
Configurations
Scripting
min_max: incremental over field
Inputs
Outputs
Configurations
Scripting
utility: workflow to pydpf generator
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (timefreq)
Inputs
Outputs
Configurations
Scripting
logic: same string fields?
Inputs
Outputs
Configurations
Scripting
logic: same meshes?
Inputs
Outputs
Configurations
Scripting
logic: same fields?
Inputs
Outputs
Configurations
Scripting
logic: fields included?
Inputs
Outputs
Configurations
Scripting
logic: same fields container?
Inputs
Outputs
Configurations
Scripting
filter: high pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: high pass (timescoping)
Inputs
Outputs
Configurations
Scripting
filter: high pass (fields container)
Inputs
Outputs
Configurations
Scripting
filter: low pass (field)
Inputs
Outputs
Configurations
Scripting
filter: low pass (fields container)
Inputs
Outputs
Configurations
Scripting
filter: band pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: band pass (timefreq)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (scoping)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (timescoping)
Inputs
Outputs
Configurations
Scripting
filter: signed high pass (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: csv to field
Inputs
Outputs
Configurations
Scripting
result: members in compression not certified
Inputs
Outputs
Configurations
Scripting
result: members in bending not certified
Inputs
Outputs
Configurations
Scripting
result: members in linear compression bending not certified
Inputs
Outputs
Configurations
Scripting
invariant: convertnum nod to bcs
Inputs
Outputs
Configurations
Scripting
geo: rotate
Inputs
Outputs
Configurations
Scripting
logic: enrich materials
Inputs
Outputs
Configurations
Scripting
serialization: data tree to json
Inputs
Outputs
Configurations
Scripting
serialization: data tree to txt
Inputs
Outputs
Configurations
Scripting
serialization: json to data tree
Inputs
Outputs
Configurations
Scripting
averaging: nodal difference (fields container)
Inputs
Outputs
Configurations
Scripting
logic: descending sort
Inputs
Outputs
Configurations
Scripting
logic: ascending sort (fields container)
Inputs
Outputs
Configurations
Scripting
logic: descending sort (fields container)
Inputs
Outputs
Configurations
Scripting
serialization: import symbolic workflow
Inputs
Outputs
Configurations
Scripting
filter: filtering max over time workflow
Inputs
Outputs
Configurations
Scripting
metadata: integrate over time freq
Inputs
Outputs
Configurations
Scripting
averaging: nodal difference (field)
Inputs
Outputs
Configurations
Scripting
result: compute stress YZ
Inputs
Outputs
Configurations
Scripting
logic: splitter::data_sources
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal (field)
Inputs
Outputs
Configurations
Scripting
averaging: elemental nodal to nodal (fields container)
Inputs
Outputs
Configurations
Scripting
averaging: elemental to nodal (field)
Inputs
Outputs
Configurations
Scripting
diff --git a/src/ansys/dpf/core/operators/mesh/from_data_model_to_dpf.py b/src/ansys/dpf/core/operators/mesh/from_data_model_to_dpf.py
new file mode 100644
--- /dev/null
+++ b/src/ansys/dpf/core/operators/mesh/from_data_model_to_dpf.py
+"""
+from_data_model_to_dpf
+======================
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class from_data_model_to_dpf(Operator):
+ """Translate a data model MeshAssembly or MeshPart into a DPF
+ MeshesContainer.
+
+ Parameters
+ ----------
+ mesh : GenericDataContainer
+ Meshassembly or meshpart
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.mesh.from_data_model_to_dpf()
+
+ >>> # Make input connections
+ >>> my_mesh = dpf.GenericDataContainer()
+ >>> op.inputs.mesh.connect(my_mesh)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.mesh.from_data_model_to_dpf(
+ ... mesh=my_mesh,
+ ... )
+
+ >>> # Get output data
+ >>> result_meshes_op = op.outputs.meshes_op()
+ """
+
+ def __init__(self, mesh=None, config=None, server=None):
+ super().__init__(
+ name="mesh::from_data_model_to_dpf", config=config, server=server
+ )
+ self._inputs = InputsFromDataModelToDpf(self)
+ self._outputs = OutputsFromDataModelToDpf(self)
+ if mesh is not None:
+ self.inputs.mesh.connect(mesh)
+
+ @staticmethod
+ def _spec():
+ description = """Translate a data model MeshAssembly or MeshPart into a DPF
+ MeshesContainer."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="mesh",
+ type_names=["generic_data_container"],
+ optional=False,
+ document="""Meshassembly or meshpart""",
+ name_derived_class=["mesh_assembly"],
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="meshes_op",
+ type_names=["meshes_container"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(
+ name="mesh::from_data_model_to_dpf", server=server
+ )
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsFromDataModelToDpf
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsFromDataModelToDpf
+ """
+ return super().outputs
+
+
+class InputsFromDataModelToDpf(_Inputs):
+ """Intermediate class used to connect user inputs to
+ from_data_model_to_dpf operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.mesh.from_data_model_to_dpf()
+ >>> my_mesh = dpf.GenericDataContainer()
+ >>> op.inputs.mesh.connect(my_mesh)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(from_data_model_to_dpf._spec().inputs, op)
+ self._mesh = Input(from_data_model_to_dpf._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._mesh)
+
+ @property
+ def mesh(self):
+ """Allows to connect mesh input to the operator.
+
+ Meshassembly or meshpart
+
+ Parameters
+ ----------
+ my_mesh : GenericDataContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.mesh.from_data_model_to_dpf()
+ >>> op.inputs.mesh.connect(my_mesh)
+ >>> # or
+ >>> op.inputs.mesh(my_mesh)
+ """
+ return self._mesh
+
+
+class OutputsFromDataModelToDpf(_Outputs):
+ """Intermediate class used to get outputs from
+ from_data_model_to_dpf operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.mesh.from_data_model_to_dpf()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_meshes_op = op.outputs.meshes_op()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(from_data_model_to_dpf._spec().outputs, op)
+ self._meshes_op = Output(from_data_model_to_dpf._spec().output_pin(0), 0, op)
+ self._outputs.append(self._meshes_op)
+
+ @property
+ def meshes_op(self):
+ """Allows to get meshes_op output of the operator
+
+ Returns
+ ----------
+ my_meshes_op : MeshesContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.mesh.from_data_model_to_dpf()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_meshes_op = op.outputs.meshes_op()
+ """ # noqa: E501
+ return self._meshes_op
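Reviewer note: a minimal usage sketch for the new mesh::from_data_model_to_dpf operator. It assumes a server exposing the data model plugin is running, and the empty GenericDataContainer below is only a stand-in for a real MeshAssembly or MeshPart:

>>> from ansys.dpf import core as dpf
>>> my_mesh = dpf.GenericDataContainer()  # stand-in for a MeshAssembly or MeshPart
>>> op = dpf.operators.mesh.from_data_model_to_dpf(mesh=my_mesh)
>>> meshes = op.outputs.meshes_op()  # evaluates and returns a MeshesContainer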
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_provider.py b/src/ansys/dpf/core/operators/mesh/mesh_provider.py
index 42a6bd319f9..b3be63325d1 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_provider.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_provider.py
@@ -12,49 +12,11 @@
class mesh_provider(Operator):
- """Reads a mesh from result files.
+ """Gets a mesh region from a data model data sources.
Parameters
----------
- time_scoping : int, optional
- Optional time/frequency set id of the mesh,
- supported for adaptative meshes.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
data_sources : DataSources
- Result file path container, used if no
- streams are set
- read_cyclic : int, optional
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- with one entity (vector) or region
- scoping with one id (scoping) (region
- corresponds to zone for fluid results
- or part for lsdyna results).
- laziness : DataTree, optional
- Configurate whether lazy evaluation can be
- performed and to what extent.
- supported attributes are: -
- "num_named_selections"->num named
- selection to read (-1 is all, int32,
- default si -1), careful: the other
- named selections will not be
- available, use mesh_property_provider
- operator to read them. - all mesh
- property fields "mat",
- "named_selection",
- "apdl_element_type", "section"-> if
- set to 1 these properties will not be
- read and a workflow will be bounded
- to the properties to be evaluated on
- demand, with 0 they are read (default
- is 0). - "all_available_properties"
- option set to 0 will return all
- possible properties
Examples
@@ -65,133 +27,43 @@ class mesh_provider(Operator):
>>> op = dpf.operators.mesh.mesh_provider()
>>> # Make input connections
- >>> my_time_scoping = int()
- >>> op.inputs.time_scoping.connect(my_time_scoping)
- >>> my_streams_container = dpf.StreamsContainer()
- >>> op.inputs.streams_container.connect(my_streams_container)
>>> my_data_sources = dpf.DataSources()
>>> op.inputs.data_sources.connect(my_data_sources)
- >>> my_read_cyclic = int()
- >>> op.inputs.read_cyclic.connect(my_read_cyclic)
- >>> my_region_scoping = dpf.Scoping()
- >>> op.inputs.region_scoping.connect(my_region_scoping)
- >>> my_laziness = dpf.DataTree()
- >>> op.inputs.laziness.connect(my_laziness)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.mesh.mesh_provider(
- ... time_scoping=my_time_scoping,
- ... streams_container=my_streams_container,
... data_sources=my_data_sources,
- ... read_cyclic=my_read_cyclic,
- ... region_scoping=my_region_scoping,
- ... laziness=my_laziness,
... )
>>> # Get output data
- >>> result_mesh = op.outputs.mesh()
+ >>> result_meshs = op.outputs.meshs()
"""
- def __init__(
- self,
- time_scoping=None,
- streams_container=None,
- data_sources=None,
- read_cyclic=None,
- region_scoping=None,
- laziness=None,
- config=None,
- server=None,
- ):
- super().__init__(name="mesh_provider", config=config, server=server)
+ def __init__(self, data_sources=None, config=None, server=None):
+ super().__init__(
+ name="ds_mechdb::mechdb::mesh_provider", config=config, server=server
+ )
self._inputs = InputsMeshProvider(self)
self._outputs = OutputsMeshProvider(self)
- if time_scoping is not None:
- self.inputs.time_scoping.connect(time_scoping)
- if streams_container is not None:
- self.inputs.streams_container.connect(streams_container)
if data_sources is not None:
self.inputs.data_sources.connect(data_sources)
- if read_cyclic is not None:
- self.inputs.read_cyclic.connect(read_cyclic)
- if region_scoping is not None:
- self.inputs.region_scoping.connect(region_scoping)
- if laziness is not None:
- self.inputs.laziness.connect(laziness)
@staticmethod
def _spec():
- description = """Reads a mesh from result files."""
+ description = """Gets a mesh region from a data model data sources."""
spec = Specification(
description=description,
map_input_pin_spec={
- 0: PinSpecification(
- name="time_scoping",
- type_names=["int32"],
- optional=True,
- document="""Optional time/frequency set id of the mesh,
- supported for adaptative meshes.""",
- ),
- 3: PinSpecification(
- name="streams_container",
- type_names=["streams_container"],
- optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
- ),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
- ),
- 14: PinSpecification(
- name="read_cyclic",
- type_names=["enum dataProcessing::ECyclicReading", "int32"],
- optional=True,
- document="""If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).""",
- ),
- 25: PinSpecification(
- name="region_scoping",
- type_names=["scoping", "int32", "vector"],
- optional=True,
- document="""Region id (integer) or vector of region ids
- with one entity (vector) or region
- scoping with one id (scoping) (region
- corresponds to zone for fluid results
- or part for lsdyna results).""",
- ),
- 200: PinSpecification(
- name="laziness",
- type_names=["abstract_data_tree"],
- optional=True,
- document="""Configurate whether lazy evaluation can be
- performed and to what extent.
- supported attributes are: -
- "num_named_selections"->num named
- selection to read (-1 is all, int32,
- default si -1), careful: the other
- named selections will not be
- available, use mesh_property_provider
- operator to read them. - all mesh
- property fields "mat",
- "named_selection",
- "apdl_element_type", "section"-> if
- set to 1 these properties will not be
- read and a workflow will be bounded
- to the properties to be evaluated on
- demand, with 0 they are read (default
- is 0). - "all_available_properties"
- option set to 0 will return all
- possible properties""",
+ document="""""",
),
},
map_output_pin_spec={
0: PinSpecification(
- name="mesh",
+ name="meshs",
type_names=["abstract_meshed_region"],
optional=False,
document="""""",
@@ -214,7 +86,9 @@ def default_config(server=None):
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
- return Operator.default_config(name="mesh_provider", server=server)
+ return Operator.default_config(
+ name="ds_mechdb::mechdb::mesh_provider", server=server
+ )
@property
def inputs(self):
@@ -245,84 +119,19 @@ class InputsMeshProvider(_Inputs):
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_provider()
- >>> my_time_scoping = int()
- >>> op.inputs.time_scoping.connect(my_time_scoping)
- >>> my_streams_container = dpf.StreamsContainer()
- >>> op.inputs.streams_container.connect(my_streams_container)
>>> my_data_sources = dpf.DataSources()
>>> op.inputs.data_sources.connect(my_data_sources)
- >>> my_read_cyclic = int()
- >>> op.inputs.read_cyclic.connect(my_read_cyclic)
- >>> my_region_scoping = dpf.Scoping()
- >>> op.inputs.region_scoping.connect(my_region_scoping)
- >>> my_laziness = dpf.DataTree()
- >>> op.inputs.laziness.connect(my_laziness)
"""
def __init__(self, op: Operator):
super().__init__(mesh_provider._spec().inputs, op)
- self._time_scoping = Input(mesh_provider._spec().input_pin(0), 0, op, -1)
- self._inputs.append(self._time_scoping)
- self._streams_container = Input(mesh_provider._spec().input_pin(3), 3, op, -1)
- self._inputs.append(self._streams_container)
self._data_sources = Input(mesh_provider._spec().input_pin(4), 4, op, -1)
self._inputs.append(self._data_sources)
- self._read_cyclic = Input(mesh_provider._spec().input_pin(14), 14, op, -1)
- self._inputs.append(self._read_cyclic)
- self._region_scoping = Input(mesh_provider._spec().input_pin(25), 25, op, -1)
- self._inputs.append(self._region_scoping)
- self._laziness = Input(mesh_provider._spec().input_pin(200), 200, op, -1)
- self._inputs.append(self._laziness)
-
- @property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Optional time/frequency set id of the mesh,
- supported for adaptative meshes.
-
- Parameters
- ----------
- my_time_scoping : int
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.mesh_provider()
- >>> op.inputs.time_scoping.connect(my_time_scoping)
- >>> # or
- >>> op.inputs.time_scoping(my_time_scoping)
- """
- return self._time_scoping
-
- @property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
-
- Result file container allowed to be kept open
- to cache data
-
- Parameters
- ----------
- my_streams_container : StreamsContainer
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.mesh_provider()
- >>> op.inputs.streams_container.connect(my_streams_container)
- >>> # or
- >>> op.inputs.streams_container(my_streams_container)
- """
- return self._streams_container
@property
def data_sources(self):
"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
-
Parameters
----------
my_data_sources : DataSources
@@ -337,90 +146,6 @@ def data_sources(self):
"""
return self._data_sources
- @property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
-
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
-
- Parameters
- ----------
- my_read_cyclic : int
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.mesh_provider()
- >>> op.inputs.read_cyclic.connect(my_read_cyclic)
- >>> # or
- >>> op.inputs.read_cyclic(my_read_cyclic)
- """
- return self._read_cyclic
-
- @property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
-
- Region id (integer) or vector of region ids
- with one entity (vector) or region
- scoping with one id (scoping) (region
- corresponds to zone for fluid results
- or part for lsdyna results).
-
- Parameters
- ----------
- my_region_scoping : Scoping or int
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.mesh_provider()
- >>> op.inputs.region_scoping.connect(my_region_scoping)
- >>> # or
- >>> op.inputs.region_scoping(my_region_scoping)
- """
- return self._region_scoping
-
- @property
- def laziness(self):
- """Allows to connect laziness input to the operator.
-
- Configurate whether lazy evaluation can be
- performed and to what extent.
- supported attributes are: -
- "num_named_selections"->num named
- selection to read (-1 is all, int32,
- default si -1), careful: the other
- named selections will not be
- available, use mesh_property_provider
- operator to read them. - all mesh
- property fields "mat",
- "named_selection",
- "apdl_element_type", "section"-> if
- set to 1 these properties will not be
- read and a workflow will be bounded
- to the properties to be evaluated on
- demand, with 0 they are read (default
- is 0). - "all_available_properties"
- option set to 0 will return all
- possible properties
-
- Parameters
- ----------
- my_laziness : DataTree
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.mesh_provider()
- >>> op.inputs.laziness.connect(my_laziness)
- >>> # or
- >>> op.inputs.laziness(my_laziness)
- """
- return self._laziness
-
class OutputsMeshProvider(_Outputs):
"""Intermediate class used to get outputs from
@@ -431,27 +156,27 @@ class OutputsMeshProvider(_Outputs):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_provider()
>>> # Connect inputs : op.inputs. ...
- >>> result_mesh = op.outputs.mesh()
+ >>> result_meshs = op.outputs.meshs()
"""
def __init__(self, op: Operator):
super().__init__(mesh_provider._spec().outputs, op)
- self._mesh = Output(mesh_provider._spec().output_pin(0), 0, op)
- self._outputs.append(self._mesh)
+ self._meshs = Output(mesh_provider._spec().output_pin(0), 0, op)
+ self._outputs.append(self._meshs)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def meshs(self):
+ """Allows to get meshs output of the operator
Returns
----------
- my_mesh : MeshedRegion
+ my_meshs : MeshedRegion
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_provider()
>>> # Connect inputs : op.inputs. ...
- >>> result_mesh = op.outputs.mesh()
+ >>> result_meshs = op.outputs.meshs()
""" # noqa: E501
- return self._mesh
+ return self._meshs
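Reviewer note: with this change, mesh_provider now wraps ds_mechdb::mechdb::mesh_provider, keeps only the data_sources input (pin 4), and renames the output pin from mesh to meshs. A minimal sketch, assuming a hypothetical .mechdb path:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"D:\data\model.mechdb")  # hypothetical data model source
>>> op = dpf.operators.mesh.mesh_provider(data_sources=ds)
>>> mesh = op.outputs.meshs()  # note the renamed output pin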
diff --git a/src/ansys/dpf/core/operators/mesh/meshes_provider.py b/src/ansys/dpf/core/operators/mesh/meshes_provider.py
index 7763366b9c4..d3a5b3cea37 100644
--- a/src/ansys/dpf/core/operators/mesh/meshes_provider.py
+++ b/src/ansys/dpf/core/operators/mesh/meshes_provider.py
@@ -12,29 +12,11 @@
class meshes_provider(Operator):
- """Reads meshes from result files. Meshes can be spatially or temporally
- varying.
+ """Gets a meshes_op container from a data model data sources.
Parameters
----------
- time_scoping : Scoping or int, optional
- Time/frequency set ids required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
data_sources : DataSources
- Result file path container, used if no
- streams are set
- read_cyclic : int, optional
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
Examples
@@ -45,103 +27,43 @@ class meshes_provider(Operator):
>>> op = dpf.operators.mesh.meshes_provider()
>>> # Make input connections
- >>> my_time_scoping = dpf.Scoping()
- >>> op.inputs.time_scoping.connect(my_time_scoping)
- >>> my_streams_container = dpf.StreamsContainer()
- >>> op.inputs.streams_container.connect(my_streams_container)
>>> my_data_sources = dpf.DataSources()
>>> op.inputs.data_sources.connect(my_data_sources)
- >>> my_read_cyclic = int()
- >>> op.inputs.read_cyclic.connect(my_read_cyclic)
- >>> my_region_scoping = dpf.Scoping()
- >>> op.inputs.region_scoping.connect(my_region_scoping)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.mesh.meshes_provider(
- ... time_scoping=my_time_scoping,
- ... streams_container=my_streams_container,
... data_sources=my_data_sources,
- ... read_cyclic=my_read_cyclic,
- ... region_scoping=my_region_scoping,
... )
>>> # Get output data
- >>> result_meshes = op.outputs.meshes()
+ >>> result_meshes_op = op.outputs.meshes_op()
"""
- def __init__(
- self,
- time_scoping=None,
- streams_container=None,
- data_sources=None,
- read_cyclic=None,
- region_scoping=None,
- config=None,
- server=None,
- ):
- super().__init__(name="meshes_provider", config=config, server=server)
+ def __init__(self, data_sources=None, config=None, server=None):
+ super().__init__(
+ name="mapdl_live_dm::db_live::meshes_provider", config=config, server=server
+ )
self._inputs = InputsMeshesProvider(self)
self._outputs = OutputsMeshesProvider(self)
- if time_scoping is not None:
- self.inputs.time_scoping.connect(time_scoping)
- if streams_container is not None:
- self.inputs.streams_container.connect(streams_container)
if data_sources is not None:
self.inputs.data_sources.connect(data_sources)
- if read_cyclic is not None:
- self.inputs.read_cyclic.connect(read_cyclic)
- if region_scoping is not None:
- self.inputs.region_scoping.connect(region_scoping)
@staticmethod
def _spec():
- description = """Reads meshes from result files. Meshes can be spatially or temporally
- varying."""
+ description = """Gets a meshes_op container from a data model data sources."""
spec = Specification(
description=description,
map_input_pin_spec={
- 0: PinSpecification(
- name="time_scoping",
- type_names=["scoping", "vector", "int32"],
- optional=True,
- document="""Time/frequency set ids required in output.""",
- ),
- 3: PinSpecification(
- name="streams_container",
- type_names=["streams_container"],
- optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
- ),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
- ),
- 14: PinSpecification(
- name="read_cyclic",
- type_names=["enum dataProcessing::ECyclicReading", "int32"],
- optional=True,
- document="""If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).""",
- ),
- 25: PinSpecification(
- name="region_scoping",
- type_names=["scoping", "int32", "vector"],
- optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document="""""",
),
},
map_output_pin_spec={
0: PinSpecification(
- name="meshes",
+ name="meshes_op",
type_names=["meshes_container"],
optional=False,
document="""""",
@@ -164,7 +86,9 @@ def default_config(server=None):
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
- return Operator.default_config(name="meshes_provider", server=server)
+ return Operator.default_config(
+ name="mapdl_live_dm::db_live::meshes_provider", server=server
+ )
@property
def inputs(self):
@@ -195,79 +119,19 @@ class InputsMeshesProvider(_Inputs):
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.meshes_provider()
- >>> my_time_scoping = dpf.Scoping()
- >>> op.inputs.time_scoping.connect(my_time_scoping)
- >>> my_streams_container = dpf.StreamsContainer()
- >>> op.inputs.streams_container.connect(my_streams_container)
>>> my_data_sources = dpf.DataSources()
>>> op.inputs.data_sources.connect(my_data_sources)
- >>> my_read_cyclic = int()
- >>> op.inputs.read_cyclic.connect(my_read_cyclic)
- >>> my_region_scoping = dpf.Scoping()
- >>> op.inputs.region_scoping.connect(my_region_scoping)
"""
def __init__(self, op: Operator):
super().__init__(meshes_provider._spec().inputs, op)
- self._time_scoping = Input(meshes_provider._spec().input_pin(0), 0, op, -1)
- self._inputs.append(self._time_scoping)
- self._streams_container = Input(meshes_provider._spec().input_pin(3), 3, op, -1)
- self._inputs.append(self._streams_container)
self._data_sources = Input(meshes_provider._spec().input_pin(4), 4, op, -1)
self._inputs.append(self._data_sources)
- self._read_cyclic = Input(meshes_provider._spec().input_pin(14), 14, op, -1)
- self._inputs.append(self._read_cyclic)
- self._region_scoping = Input(meshes_provider._spec().input_pin(25), 25, op, -1)
- self._inputs.append(self._region_scoping)
-
- @property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/frequency set ids required in output.
-
- Parameters
- ----------
- my_time_scoping : Scoping or int
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.meshes_provider()
- >>> op.inputs.time_scoping.connect(my_time_scoping)
- >>> # or
- >>> op.inputs.time_scoping(my_time_scoping)
- """
- return self._time_scoping
-
- @property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
-
- Result file container allowed to be kept open
- to cache data
-
- Parameters
- ----------
- my_streams_container : StreamsContainer
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.meshes_provider()
- >>> op.inputs.streams_container.connect(my_streams_container)
- >>> # or
- >>> op.inputs.streams_container(my_streams_container)
- """
- return self._streams_container
@property
def data_sources(self):
"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
-
Parameters
----------
my_data_sources : DataSources
@@ -282,52 +146,6 @@ def data_sources(self):
"""
return self._data_sources
- @property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
-
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
-
- Parameters
- ----------
- my_read_cyclic : int
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.meshes_provider()
- >>> op.inputs.read_cyclic.connect(my_read_cyclic)
- >>> # or
- >>> op.inputs.read_cyclic(my_read_cyclic)
- """
- return self._read_cyclic
-
- @property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
-
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
-
- Parameters
- ----------
- my_region_scoping : Scoping or int
-
- Examples
- --------
- >>> from ansys.dpf import core as dpf
- >>> op = dpf.operators.mesh.meshes_provider()
- >>> op.inputs.region_scoping.connect(my_region_scoping)
- >>> # or
- >>> op.inputs.region_scoping(my_region_scoping)
- """
- return self._region_scoping
-
class OutputsMeshesProvider(_Outputs):
"""Intermediate class used to get outputs from
@@ -338,27 +156,27 @@ class OutputsMeshesProvider(_Outputs):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.meshes_provider()
>>> # Connect inputs : op.inputs. ...
- >>> result_meshes = op.outputs.meshes()
+ >>> result_meshes_op = op.outputs.meshes_op()
"""
def __init__(self, op: Operator):
super().__init__(meshes_provider._spec().outputs, op)
- self._meshes = Output(meshes_provider._spec().output_pin(0), 0, op)
- self._outputs.append(self._meshes)
+ self._meshes_op = Output(meshes_provider._spec().output_pin(0), 0, op)
+ self._outputs.append(self._meshes_op)
@property
- def meshes(self):
- """Allows to get meshes output of the operator
+ def meshes_op(self):
+ """Allows to get meshes_op output of the operator
Returns
----------
- my_meshes : MeshesContainer
+ my_meshes_op : MeshesContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.meshes_provider()
>>> # Connect inputs : op.inputs. ...
- >>> result_meshes = op.outputs.meshes()
+ >>> result_meshes_op = op.outputs.meshes_op()
""" # noqa: E501
- return self._meshes
+ return self._meshes_op
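Reviewer note: meshes_provider is retargeted the same way, to mapdl_live_dm::db_live::meshes_provider, with only data_sources kept and the output renamed from meshes to meshes_op. A minimal sketch, assuming a hypothetical live MAPDL database path:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"D:\data\model.db")  # hypothetical live database
>>> op = dpf.operators.mesh.meshes_provider(data_sources=ds)
>>> meshes = op.outputs.meshes_op()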
diff --git a/src/ansys/dpf/core/operators/mesh/skin.py b/src/ansys/dpf/core/operators/mesh/skin.py
index 28e7cc1b3cd..1c910c7e0fa 100644
--- a/src/ansys/dpf/core/operators/mesh/skin.py
+++ b/src/ansys/dpf/core/operators/mesh/skin.py
@@ -29,6 +29,10 @@ class skin(Operator):
mesh shell elements (boolean = 1) are
duplicated, one per each orientation,
or (boolean = 0) remain unchanged.
+ add_beam : bool, optional
+ If input mesh contains beam elements, output
+ mesh beam elements (boolean = 1) are
+ added or (boolean = 0) are ignored.
Examples
@@ -45,12 +49,15 @@ class skin(Operator):
>>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
>>> my_duplicate_shell = bool()
>>> op.inputs.duplicate_shell.connect(my_duplicate_shell)
+ >>> my_add_beam = bool()
+ >>> op.inputs.add_beam.connect(my_add_beam)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.mesh.skin(
... mesh=my_mesh,
... mesh_scoping=my_mesh_scoping,
... duplicate_shell=my_duplicate_shell,
+ ... add_beam=my_add_beam,
... )
>>> # Get output data
@@ -66,6 +73,7 @@ def __init__(
mesh=None,
mesh_scoping=None,
duplicate_shell=None,
+ add_beam=None,
config=None,
server=None,
):
@@ -78,6 +86,8 @@ def __init__(
self.inputs.mesh_scoping.connect(mesh_scoping)
if duplicate_shell is not None:
self.inputs.duplicate_shell.connect(duplicate_shell)
+ if add_beam is not None:
+ self.inputs.add_beam.connect(add_beam)
@staticmethod
def _spec():
@@ -111,6 +121,14 @@ def _spec():
duplicated, one per each orientation,
or (boolean = 0) remain unchanged.""",
),
+ 3: PinSpecification(
+ name="add_beam",
+ type_names=["bool"],
+ optional=True,
+ document="""If input mesh contains beam elements, output
+ mesh beam elements (boolean = 1) are
+ added or (boolean = 0) are ignored.""",
+ ),
},
map_output_pin_spec={
0: PinSpecification(
@@ -210,6 +228,8 @@ class InputsSkin(_Inputs):
>>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
>>> my_duplicate_shell = bool()
>>> op.inputs.duplicate_shell.connect(my_duplicate_shell)
+ >>> my_add_beam = bool()
+ >>> op.inputs.add_beam.connect(my_add_beam)
"""
def __init__(self, op: Operator):
@@ -220,6 +240,8 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
self._duplicate_shell = Input(skin._spec().input_pin(2), 2, op, -1)
self._inputs.append(self._duplicate_shell)
+ self._add_beam = Input(skin._spec().input_pin(3), 3, op, -1)
+ self._inputs.append(self._add_beam)
@property
def mesh(self):
@@ -286,6 +308,28 @@ def duplicate_shell(self):
"""
return self._duplicate_shell
+ @property
+ def add_beam(self):
+ """Allows to connect add_beam input to the operator.
+
+ If input mesh contains beam elements, output
+ mesh beam elements (boolean = 1) are
+ added or (boolean = 0) are ignored.
+
+ Parameters
+ ----------
+ my_add_beam : bool
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.mesh.skin()
+ >>> op.inputs.add_beam.connect(my_add_beam)
+ >>> # or
+ >>> op.inputs.add_beam(my_add_beam)
+ """
+ return self._add_beam
+
class OutputsSkin(_Outputs):
"""Intermediate class used to get outputs from
diff --git a/src/ansys/dpf/core/operators/scoping/from_data_model_to_dpf.py b/src/ansys/dpf/core/operators/scoping/from_data_model_to_dpf.py
new file mode 100644
index 00000000000..67d0f107a61
--- /dev/null
+++ b/src/ansys/dpf/core/operators/scoping/from_data_model_to_dpf.py
@@ -0,0 +1,183 @@
+"""
+from_data_model_to_dpf
+======================
+Autogenerated DPF operator classes.
+"""
+
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class from_data_model_to_dpf(Operator):
+ """Translate a data model Reference into a DPF Scoping.
+
+ Parameters
+ ----------
+ reference : GenericDataContainer
+
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+
+ >>> # Instantiate operator
+ >>> op = dpf.operators.scoping.from_data_model_to_dpf()
+
+ >>> # Make input connections
+ >>> my_reference = dpf.GenericDataContainer()
+ >>> op.inputs.reference.connect(my_reference)
+
+ >>> # Instantiate operator and connect inputs in one line
+ >>> op = dpf.operators.scoping.from_data_model_to_dpf(
+ ... reference=my_reference,
+ ... )
+
+ >>> # Get output data
+ >>> result_scoping = op.outputs.scoping()
+ """
+
+ def __init__(self, reference=None, config=None, server=None):
+ super().__init__(
+ name="scoping::from_data_model_to_dpf", config=config, server=server
+ )
+ self._inputs = InputsFromDataModelToDpf(self)
+ self._outputs = OutputsFromDataModelToDpf(self)
+ if reference is not None:
+ self.inputs.reference.connect(reference)
+
+ @staticmethod
+ def _spec():
+ description = """Translate a data model Reference into a DPF Scoping."""
+ spec = Specification(
+ description=description,
+ map_input_pin_spec={
+ 0: PinSpecification(
+ name="reference",
+ type_names=["generic_data_container"],
+ optional=False,
+ document="""""",
+ name_derived_class=["reference"],
+ ),
+ },
+ map_output_pin_spec={
+ 0: PinSpecification(
+ name="scoping",
+ type_names=["scoping"],
+ optional=False,
+ document="""""",
+ ),
+ },
+ )
+ return spec
+
+ @staticmethod
+ def default_config(server=None):
+ """Returns the default config of the operator.
+
+ This config can then be changed to the user needs and be used to
+ instantiate the operator. The Configuration allows to customize
+ how the operation will be processed by the operator.
+
+ Parameters
+ ----------
+ server : server.DPFServer, optional
+ Server with channel connected to the remote or local instance. When
+ ``None``, attempts to use the global server.
+ """
+ return Operator.default_config(
+ name="scoping::from_data_model_to_dpf", server=server
+ )
+
+ @property
+ def inputs(self):
+ """Enables to connect inputs to the operator
+
+ Returns
+ --------
+ inputs : InputsFromDataModelToDpf
+ """
+ return super().inputs
+
+ @property
+ def outputs(self):
+ """Enables to get outputs of the operator by evaluating it
+
+ Returns
+ --------
+ outputs : OutputsFromDataModelToDpf
+ """
+ return super().outputs
+
+
+class InputsFromDataModelToDpf(_Inputs):
+ """Intermediate class used to connect user inputs to
+ from_data_model_to_dpf operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.scoping.from_data_model_to_dpf()
+ >>> my_reference = dpf.GenericDataContainer()
+ >>> op.inputs.reference.connect(my_reference)
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(from_data_model_to_dpf._spec().inputs, op)
+ self._reference = Input(from_data_model_to_dpf._spec().input_pin(0), 0, op, -1)
+ self._inputs.append(self._reference)
+
+ @property
+ def reference(self):
+ """Allows to connect reference input to the operator.
+
+ Parameters
+ ----------
+ my_reference : GenericDataContainer
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.scoping.from_data_model_to_dpf()
+ >>> op.inputs.reference.connect(my_reference)
+ >>> # or
+ >>> op.inputs.reference(my_reference)
+ """
+ return self._reference
+
+
+class OutputsFromDataModelToDpf(_Outputs):
+ """Intermediate class used to get outputs from
+ from_data_model_to_dpf operator.
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.scoping.from_data_model_to_dpf()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_scoping = op.outputs.scoping()
+ """
+
+ def __init__(self, op: Operator):
+ super().__init__(from_data_model_to_dpf._spec().outputs, op)
+ self._scoping = Output(from_data_model_to_dpf._spec().output_pin(0), 0, op)
+ self._outputs.append(self._scoping)
+
+ @property
+ def scoping(self):
+ """Allows to get scoping output of the operator
+
+ Returns
+ ----------
+ my_scoping : Scoping
+
+ Examples
+ --------
+ >>> from ansys.dpf import core as dpf
+ >>> op = dpf.operators.scoping.from_data_model_to_dpf()
+ >>> # Connect inputs : op.inputs. ...
+ >>> result_scoping = op.outputs.scoping()
+ """ # noqa: E501
+ return self._scoping
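Reviewer note: the scoping counterpart follows the same pattern; a minimal sketch, again with an empty GenericDataContainer standing in for a real data model Reference:

>>> from ansys.dpf import core as dpf
>>> ref = dpf.GenericDataContainer()  # stand-in for a data model Reference
>>> op = dpf.operators.scoping.from_data_model_to_dpf(reference=ref)
>>> scoping = op.outputs.scoping()  # evaluates and returns a dpf.Scoping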
diff --git a/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll b/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll
index 806b479d27b..a6bd910d55e 100644
Binary files a/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll and b/src/ansys/dpf/gatebin/Ans.Dpf.GrpcClient.dll differ
diff --git a/src/ansys/dpf/gatebin/DPFClientAPI.dll b/src/ansys/dpf/gatebin/DPFClientAPI.dll
index d5a205cfa6b..ba303a71fd9 100644
Binary files a/src/ansys/dpf/gatebin/DPFClientAPI.dll and b/src/ansys/dpf/gatebin/DPFClientAPI.dll differ
diff --git a/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so b/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so
index 660f71c7e2b..866deb84833 100644
Binary files a/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so and b/src/ansys/dpf/gatebin/libAns.Dpf.GrpcClient.so differ
diff --git a/src/ansys/dpf/gatebin/libDPFClientAPI.so b/src/ansys/dpf/gatebin/libDPFClientAPI.so
index 5c6af9d8cdb..7a6b87709f6 100644
Binary files a/src/ansys/dpf/gatebin/libDPFClientAPI.so and b/src/ansys/dpf/gatebin/libDPFClientAPI.so differ